def wrapper(*args, **kwargs):
    # Decorator body: ensure the local 'fabsetup_custom' config directory
    # exists (seeded from the shipped presettings) and is a git repository
    # before running the wrapped task `func` (closure variable).
    custom_dir = join(dirname(dirname(__file__)), 'fabsetup_custom')
    presetting_dir = join(dirname(dirname(__file__)), 'fabfile_data',
                          'presetting_fabsetup_custom')
    if not isdir(custom_dir):
        # First run: create the dir and copy in the preset files.
        print(yellow('\n** ** Init ') + yellow('fabsetup_custom', bold=True) +
              yellow(' ** **\n'))
        print(yellow('** Create files in dir fabsetup_custom **'))
        local(flo('mkdir -p {custom_dir}'))
        local(flo('cp -r --no-clobber {presetting_dir}/. {custom_dir}'))
    else:
        # Already present: silently add any *new* preset files
        # (--no-clobber never overwrites user-edited ones).
        with quiet():
            local(flo('cp -r --no-clobber {presetting_dir}/. {custom_dir}'))
    if not isdir(join(custom_dir, '.git')):
        # Turn the config dir into a git repo with an initial commit.
        print(yellow('\n** Git repo fabsetup_custom: init and first commit **'))
        local(flo('cd {custom_dir} && git init'))
        local(flo('cd {custom_dir} && git add .'))
        local(flo('cd {custom_dir} && git commit -am "Initial commit"'))
        print(yellow("** Done. Don't forget to create a backup of your "
                     "fabsetup_custom repo **\n"))
        print(yellow("** But do not make it public, it's custom **\n",
                     bold=True))
    else:
        # Repo exists: warn about uncommitted changes.
        with quiet():
            cmd = flo('cd {custom_dir} && git status --porcelain')
            res = local(cmd, capture=True)
            if res:
                print(yellow('\n** git repo fabsetup_custom has '
                             'uncommitted changes: **'))
                print(cmd)
                print(yellow(res, bold=True))
                print(yellow("** Don't forget to commit them and make a "
                             "backup of your repo **\n"))
    return func(*args, **kwargs)
def component_buildout(components='*', profile='production.cfg'): """Deploy local production ${components} buildouts""" # if we get asterix - iterate on all commands with quiet(): local_buildout = local('pwd', capture=True) if components == '*': with quiet(): with lcd('components'): output = local('ls -d */', capture=True) components = [a.strip('/') for a in output.split()] else: components = [components] for component in components: opts = {'component_dir': '%s/components/%s' % (local_buildout, component), 'profile': profile} if not console.confirm("Do you want to launch " "buildout in %(component_dir)s" % opts): continue with lcd('%(component_dir)s' % opts): # symlink profile local('test -f buildout.cfg || ln -s ./profiles/%(profile)s buildout.cfg' % opts) # noqa # bootstrap local('../../bin/python bootstrap.py') # run buildout local('./bin/buildout -N')
def wrapper(*args, **kwargs):
    # Decorator body: ensure ~/.fabsetup-custom exists (offering to create
    # it), is synced with the shipped presettings, and is a git repository,
    # before invoking the wrapped task `func` (closure variable).
    if not os.path.exists(FABSETUP_CUSTOM_DIR):
        msg = '''\
Git repository ~/.fabsetup-custom with configurations does not exist.
This configs are required to use fabsetup.

Clone it if you already have your own fabsetup-custom repository:

    git clone <user>@<hostname>:/path/to/fabsetup-custom.git  ~/.fabetup-custom

Else, initialize a new repository.

Init a new repository `~/.fabsetup-custom`?'''
        if not query_yes_no(msg, default='yes'):
            sys.exit('abort')
    custom_dir = FABSETUP_CUSTOM_DIR
    presetting_dir = join(FABFILE_DATA_DIR, 'presetting-fabsetup-custom')
    if not isdir(custom_dir):
        # First run: create the dir and seed it from the presettings.
        print(yellow('\n** ** Init ') +
              yellow('~/.fabsetup-custom', bold=True) +
              yellow(' ** **\n'))
        print(yellow(flo('** Create files in dir {custom_dir} **')))
        local(flo('mkdir -p {custom_dir}'))
        local(flo('cp -r --no-clobber {presetting_dir}/. {custom_dir}'))
        # Newly created config must be importable for this very run.
        import_fabsetup_custom(globals())
    else:
        # Silently add any new preset files; never overwrite user edits.
        with quiet():
            local(flo(
                'cp -r --no-clobber {presetting_dir}/. {custom_dir}'))
    if not isdir(join(custom_dir, '.git')):
        # Turn the config dir into a git repo with an initial commit.
        print(yellow(
            '\n** Git repo ~/.fabsetup-custom: '
            'init and first commit **'))
        local(flo('cd {custom_dir} && git init'))
        local(flo('cd {custom_dir} && git add .'))
        local(flo('cd {custom_dir} && git commit -am "Initial commit"'))
        print(yellow("** Done. Don't forget to create a backup of your "
                     '~/.fabsetup-custom repo **\n'))
        print(yellow("** But do not make it public, it's custom **\n",
                     bold=True))
    else:
        # Repo exists: warn about uncommitted changes.
        with quiet():
            cmd = flo('cd {custom_dir} && git status --porcelain')
            res = local(cmd, capture=True)
            if res:
                print(yellow('\n** git repo ') +
                      magenta('~/.fabsetup-custom ') +
                      yellow('has uncommitted changes: **'))
                print(cmd)
                print(yellow(res, bold=True))
                print(yellow(
                    "** Don't forget to commit them and make a "
                    "backup of your repo **\n"))
    return func(*args, **kwargs)
def _add_ansible_user(self):
    """ create ansible user

    Creates a passwordless-sudo 'ansible' user on the remote host and
    installs the local authorized key for it.
    """
    with quiet():
        # quiet(): tolerate the user/sudoers entry already existing.
        sudo("useradd ansible -m -s /bin/bash")
        sudo("echo 'ansible ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers")
    with quiet():
        # NOTE(review): user="******" looks redacted in this source --
        # presumably user="ansible"; confirm before use.
        sudo("mkdir /home/ansible/.ssh/", user="******")
        # mode=0600 is Python-2 octal syntax (0o600 in Python 3).
        put(os.path.join(self.local_dir, self.authorized_key),
            '/home/ansible/.ssh/authorized_keys2',
            mode=0600, use_sudo=True)
        local("chmod 600 %s" % os.path.join(self.local_dir, self.ssh_key))
        sudo("chown ansible /home/ansible/.ssh/authorized_keys2")
def _git_status():
    """Checks if the local git repo is ahead of the remote

    Returns "dirty", "ahead" or "clean" (or None when no phrase matched).
    Python 2 code (print statements).
    """
    with quiet():
        # Lower-cased so the phrase checks below are case-insensitive.
        result = local("git status", capture=True).lower()
    if _contains(result, ("untracked files:",
                          "nothing added to commit but untracked files")):
        # Untracked files only warn; they do not make the repo "dirty".
        print yellow(
            "Your git repo has untracked files that are being ignored.")
    if _contains(result, ("deleted:", "added:", "modified:", "renamed:")):
        print red("Your git repo has uncommitted changes.")
        return "dirty"
    if _contains(result, ("changes not staged for commit", )):
        print red("Your git repo has uncommitted changes.")
        return "dirty"
    if _contains(result, ("your branch is ahead", )):
        print red("Your git repo is ahead of the remote.")
        return "ahead"
    if _contains(result, ("nothing to commit", )):
        print green("Your git repo seems to be syncronized.")
        return "clean"
def fetch_db(destination='.'):
    """
    Dump the database on the remote host and retrieve it locally.

    The destination parameter controls where the dump should be stored
    locally.  Returns the dump's file name (timestamped .sql.gz).
    Raises NotImplementedError for non-postgresql databases.
    """
    # Credentials live in an envdir-style DATABASE_URL file on the host.
    with nested(cd(env.project_root), quiet()):
        db_credentials = run('cat envdir/DATABASE_URL')
    db_credentials_dict = dj_database_url.parse(db_credentials)
    if not is_supported_db_engine(db_credentials_dict['ENGINE']):
        raise NotImplementedError(
            "The fetch_db task only supports postgresql databases"
        )
    outfile = datetime.now().strftime('%Y-%m-%d_%H%M%S.sql.gz')
    outfile_remote = os.path.join('~', outfile)
    # Escape '$' so the shell does not expand it inside the password.
    with shell_env(PGPASSWORD=db_credentials_dict['PASSWORD'].replace('$',
                                                                      '\$')):
        run('pg_dump -O -x -h {host} -U {user} {db}|gzip > {outfile}'.format(
            host=db_credentials_dict['HOST'],
            user=db_credentials_dict['USER'],
            db=db_credentials_dict['NAME'],
            outfile=outfile_remote))
    # Download the dump, then remove the remote copy.
    get(outfile_remote, destination)
    run('rm %s' % outfile_remote)
    return outfile
def test_quiet_sets_warn_only_to_true(self):
    """quiet() (and quiet=True) must force warn_only so failures return."""
    # Sanity test to ensure environment
    with settings(warn_only=False):
        with quiet():
            # A failing command ("barf") must return code 1 instead of
            # aborting, proving quiet() flipped warn_only.
            eq_(run("barf").return_code, 1)
        # Kwarg test
        eq_(run("barf", quiet=True).return_code, 1)
def start():
    """Stop everything, then start the locust role matching this host."""
    # The designated master host coordinates; every other host is a slave.
    is_master = run('hostname') == MASTER_HOST
    with cd(LOCUST_DIR):
        # quiet(): stopping is best-effort (nothing may be running yet).
        with quiet():
            run('bin/supervisorctl -c {0} stop all'.format(sv_conf))
        if is_master:
            prog = 'locust-master'
        else:
            prog = 'locust-slave'
        run('bin/supervisorctl -c {0} start {1}'.format(sv_conf, prog))
def deploy():
    """ Deploy to AWS.

    Creates/locates the ECR repository, builds and pushes the image from a
    clean `git archive` of HEAD, then runs the serverless deploy.
    """
    service_name = '{}-{}'.format(env.appname, env.stage)
    # quiet(): creation fails harmlessly when the repository already exists.
    with quiet():
        aws_vault(
            'aws ecr create-repository --repository-name {}'.format(
                service_name),
        )
    image_url = aws_vault(
        'aws ecr describe-repositories '
        '--repository-name {} '
        '--output=text '
        '--query=repositories[0].repositoryUri'.format(
            service_name
        ),
        capture=True,
    )
    # get-login emits a ready-to-run `docker login` command.
    docker_login_command = aws_vault('aws ecr get-login --no-include-email',
                                     capture=True)
    local(docker_login_command)
    # Build with a fresh environment to avoid uncommitted files or cruft
    local(
        'git archive HEAD | docker buildx build --push --platform={} --tag={} -'.format(
            env.arch,
            image_url,
        )
    )
    aws_vault('npm run serverless -- deploy --stage {}'.format(env.stage))
def dealias_build(name):
    """Resolve a build alias to the basename of its physical directory."""
    # "live" and "stage" are aliases for underscore-prefixed build dirs.
    if name in ("live", "stage"):
        name = "_%s" % name
    build_path = "%s/deploy/builds/%s" % (ROOT_DIR, name)
    # `pwd -P` follows symlinks to the real directory.
    with quiet():
        resolved = docker_exec("cd %s && pwd -P" % build_path, capture=True)
    return os.path.basename(resolved)
def dump_db(destination):
    """
    Dump the database to the given directory and return the path to the file
    created.  This creates a gzipped SQL file.

    Raises NotImplementedError for unsupported database engines.
    """
    # Credentials come from an envdir-style DATABASE_URL file on the host.
    with cd(get_project_root()), quiet():
        db_credentials = run("cat envdir/DATABASE_URL")
    db_credentials_dict = dj_database_url.parse(db_credentials)
    if not is_supported_db_engine(db_credentials_dict["ENGINE"]):
        raise NotImplementedError(
            "The dump_db task doesn't support the remote database engine")
    outfile = Path(
        destination,
        datetime.now().strftime("%Y-%m-%d_%H%M%S.sql.gz")).as_posix()
    # Escape '$' so the shell does not expand it inside the password.
    with shell_env(
            PGPASSWORD=db_credentials_dict["PASSWORD"].replace("$", "\$")):
        run("pg_dump -O -x -h {host} -U {user} {db}|gzip > {outfile}".format(
            host=db_credentials_dict["HOST"],
            user=db_credentials_dict["USER"],
            db=db_credentials_dict["NAME"],
            outfile=outfile,
        ))
    return outfile
def leveldb():
    """Locally builds and install leveldb system-wide

    Builds snappy statically (PIC) in /tmp, builds leveldb against it with
    snappy support, installs the shared lib and headers under /usr/local,
    then removes the build tree.
    """
    with lcd('/tmp'):
        # quiet(): mkdir fails harmlessly if a previous run left the dir.
        with quiet():
            local('mkdir leveldb_install')
        with lcd('leveldb_install'):
            local(
                'svn checkout http://snappy.googlecode.com/svn/trunk/ snappy-read-only'
            )
            with lcd('snappy-read-only'):
                # Static, position-independent snappy for linking into
                # the leveldb shared object.
                local(
                    './autogen.sh && ./configure --enable-shared=no --enable-static=yes'
                )
                local("make clean && make CXXFLAGS='-g -O2 -fPIC'")
            # Clone, or pull if the checkout already exists.
            local(
                'git clone https://code.google.com/p/leveldb/ || (cd leveldb; git pull)'
            )
            with lcd('leveldb'):
                local('make clean')
                local(
                    "make LDFLAGS='-L../snappy-read-only/.libs/ -Bstatic -lsnappy -shared' "
                    "OPT='-fPIC -O2 -DNDEBUG -DSNAPPY -I../snappy-read-only' "
                    "SNAPPY_CFLAGS='' ")
    # System-wide install needs root.
    sudo('cp -rf /tmp/leveldb_install/leveldb/libleveldb.so* /usr/local/lib')
    sudo(
        'cp -rf /tmp/leveldb_install/leveldb/include/leveldb /usr/local/include'
    )
    local('rm -rf /tmp/leveldb_install')
def update_or_append_line(filename, prefix, new_line, keep_backup=True,
                          append=True):
    '''Search in file 'filename' for a line starting with 'prefix' and
    replace the line by 'new_line'.

    If a line starting with 'prefix' not exists 'new_line' will be appended.
    If the file not exists, it will be created.

    Return False if new_line was appended, else True (i.e. if the prefix
    was found within of the file).
    '''
    result = None
    if env.host_string == 'localhost':
        # Local host: edit the file in place.
        result = update_or_append_local(filename, prefix, new_line,
                                        keep_backup, append)
    else:
        # Remote host: download the file, edit the local copy, upload it
        # back (both transfers via sudo through /tmp).
        tmp_dir = tempfile.mkdtemp(suffix='', prefix='fabsetup_')
        # fabric.api.local(flo('chmod 777 {tmp_dir}'))
        local_path = os.path.join(tmp_dir, os.path.basename(filename))
        fabric.operations.get(remote_path=filename, local_path=local_path,
                              use_sudo=True, temp_dir='/tmp')
        result = update_or_append_local(local_path, prefix, new_line,
                                        keep_backup, append)
        put(local_path, remote_path=filename, use_sudo=True, temp_dir='/tmp')
        # quiet(): best-effort cleanup of the temp dir.
        with quiet():
            fabric.api.local(flo('rm -rf {tmp_dir}'))
    return result
def _non_installed(packages):
    """Return the subset of `packages` that dpkg does not report installed."""
    missing = []
    with quiet():
        for pkg in packages:
            # `dpkg --status` exits non-zero for unknown/uninstalled pkgs.
            installed = run(flo('dpkg --status {pkg}')).return_code == 0
            if not installed:
                missing.append(pkg)
    return missing
def fetch_db(destination='.'):
    """
    Dump the database on the remote host and retrieve it locally.

    The destination parameter controls where the dump should be stored
    locally.  Returns the dump's file name (timestamped .sql.gz).
    Raises NotImplementedError for unsupported database engines.
    """
    require('root')
    # Credentials live in an envdir-style DATABASE_URL file on the host.
    with nested(cd(get_project_root()), quiet()):
        db_credentials = run('cat envdir/DATABASE_URL')
    db_credentials_dict = dj_database_url.parse(db_credentials)
    if not is_supported_db_engine(db_credentials_dict['ENGINE']):
        raise NotImplementedError(
            "The fetch_db task doesn't support the remote database engine")
    outfile = datetime.now().strftime('%Y-%m-%d_%H%M%S.sql.gz')
    outfile_remote = os.path.join('~', outfile)
    # Escape '$' so the shell does not expand it inside the password.
    with shell_env(
            PGPASSWORD=db_credentials_dict['PASSWORD'].replace('$', '\$')):
        run('pg_dump -O -x -h {host} -U {user} {db}|gzip > {outfile}'.format(
            host=db_credentials_dict['HOST'],
            user=db_credentials_dict['USER'],
            db=db_credentials_dict['NAME'],
            outfile=outfile_remote))
    # Download the dump, then remove the remote copy.
    get(outfile_remote, destination)
    run('rm %s' % outfile_remote)
    return outfile
def get_environment_variables(prefix='APP_'):
    """
    Get bash environment variables and set variables required by fabric,
    gives us access to the following variables by putting them in fabrics
    `env`

    release_dir       # The location of our releases
    current_release   # The path to the current release
    previous_release  # The path to the previous release
    db_name           # The database name
    db_user           # The database user
    db_host           # The database host
    db_pass           # The database password
    mage_src          # The Location of Magento's source
    media_dir         # The Location of our media folder
    var_dir           # The Location of our var folder
    unix_user         # The applications user
    unix_group        # The applications group
    """
    with quiet():
        env_vars = run('printenv | grep {}'.format(prefix))
        # NOTE: .split() separates on whitespace, so values containing
        # spaces are not supported by this scheme.
        env_vars_list = env_vars.split()
        for var in env_vars_list:
            # Bug fix: split only on the FIRST '=' -- values that
            # themselves contain '=' (URLs, base64 passwords, ...) used to
            # raise "too many values to unpack".
            key, value = var.split('=', 1)
            key = key.replace(prefix, '')
            key = key.lower()
            env[key] = value
def rollback_version(remote=True):
    """The actual rollback work.

    Closure-dependent: relies on `version`, `path`, `post_command`,
    `LATEST_FLAG` and `get_versions_alias_mapping` from the enclosing
    scope.  Runs remotely by default, locally when remote=False.
    """
    # Pick the fabric runner matching the target side.
    smart_run = run if remote else local
    # Get the final target version
    target_version = version
    if target_version.startswith(LATEST_FLAG):
        # Resolve aliases like "latest" to a concrete version name.
        mapping = get_versions_alias_mapping(path, remote)
        if target_version in mapping:
            target_version = mapping[target_version]
    # Assure that the target path does exist
    target_path = os.path.join(path, target_version)
    with quiet():
        exists = smart_run('test -e %s' % target_path).succeeded
    if not exists:
        raise SystemExit(
            'Error: No version named `{0}` exists in {1}, nor does '
            'a version have the alias `{0}`'.format(version, path)
        )
    # Overwrite the symlink for the newly specified
    # distribution to make it available
    serve_path = os.path.join(path, 'current')
    smart_run('ln -sfn %s %s' % (target_path, serve_path))
    # Run the post-install command if specified
    if post_command:
        run(post_command)
def set_local_configuration():
    # Detect the local docker setup (Dinghy on macOS, PowerShell on
    # Windows) and fill fabric's `env` accordingly.  Python 2 code
    # (print statements).
    env.compose_files = ['docker-compose.yml', 'docker-compose.worker.yml']
    env.dinghy = False
    env.power_shell = False
    env.user_id = 1000
    with quiet():
        try:
            docker_kernel = "%s" % local(
                'docker version --format "{{.Server.KernelVersion}}"',
                capture=True)
        except:
            # No docker available; fall through with an empty kernel string.
            docker_kernel = ''
    if platform == "darwin" and docker_kernel.find('linuxkit') != -1:
        # macOS with a linuxkit kernel -> Docker-for-Mac / Dinghy setup.
        env.dinghy = True
    elif platform in ["win32", "win64"]:
        env.power_shell = True
        # Python can't set the vars correctly on PowerShell and local()
        # always calls cmd.exe
        shellProjectName = local('echo %PROJECT_NAME%', capture=True)
        if (shellProjectName != env.project_name):
            domains = '`' + '`, `'.join(
                [env.root_domain] + env.extra_domains) + '`'
            print 'You must manually set environment variables on Windows:'
            print '$Env:PROJECT_NAME="%s"' % env.project_name
            print '$Env:PROJECT_DIRECTORY="%s"' % env.project_directory
            print '$Env:PROJECT_HOSTNAMES="%s"' % env.project_hostnames
            print '$Env:PROJECT_DOMAINS="%s"' % domains
            raise SystemError('Env vars not set (Windows detected)')
    if not env.power_shell:
        env.user_id = int(local('id -u', capture=True))
        # Very large uids (e.g. LDAP-backed) are mapped back to 1000.
        if env.user_id > 256000:
            env.user_id = 1000
    env.root_dir = os.path.dirname(os.path.abspath(__file__))
def remote_origin_configured(basedir):
    """True when {basedir}/.git/config has a [remote "origin"] section."""
    with quiet():
        # grep exits 0 only when the section header is present.
        cmd = flo('''grep '\[remote "origin"\]' '''
                  '{basedir}/.git/config')
        result = run(cmd)
    return result.return_code == 0
def latest_release(self): """Return latest known release # """ # Get current branch with cd(self.local_dir): current_branch = self.current_branch if current_branch == 'master': branch_ext = "" else: branch_ext = '-%s' % current_branch # Fetch branches (locally, and sorted) local("git fetch", capture=True) cmd = (r"git tag --sort version:refname|" r"grep 'v[0-9]\{8\}[a-z]\?%s$'") with quiet(): tags = local(cmd % branch_ext, capture=True) if tags.failed: self._raise_no_valid_tag() else: tag = tags.split("\n")[-1] if tag: print blue("Latest release tag is"), yellow(tag) return tag else: self._raise_no_valid_tag()
def proxy():
    """ Install docker proxy based on http://dockerflow.com """
    click.echo("Configuring proxy...")
    # quiet(): network/volume creation fails harmlessly when they exist.
    with quiet():
        run("mkdir -p %s/proxy" % env.path)
        run("docker network create -d overlay proxy")
        # Volumes should be deleted manually
        run("docker volume create le-certs")
        run("docker volume create dfp-certs")
    # Compose file shipped inside the installed 'suarm' package.
    proxy_file = resource_filename(Requirement.parse("suarm"),
                                   "suarm/tmpl/swarm_proxy.yml")
    with cd('%s/proxy/' % env.path):
        upload_template(
            filename=proxy_file,
            destination='%s/proxy/proxy.yml' % env.path,
            template_dir="./",
            use_sudo=True,
        )
    with settings(hide('warnings'), warn_only=True):
        # run("docker network ls | grep proxy | awk '{print $1}' | xargs docker network rm")
        # hide('warnings', 'running', 'stdout', 'stderr'),
        run("docker stack deploy --compose-file %s/proxy/proxy.yml proxy"
            % env.path)
    click.echo("---> Proxy has been installed... :)")
def update_if_necessary():
    # Compare the newest local "v*" tag with the tag deployed on the
    # remote; trigger update() only when they differ.
    with quiet():
        # version:refname sorting + tail -n -1 yields the highest tag.
        latest_version = fabric_local(
            "git tag --sort=v:refname -l \"v*\" | tail -n -1", capture=True)
    with cd(env.project_path):
        current_version = run("git describe --tags --always")
    if latest_version and current_version != latest_version:
        update(latest_version)
def _web_pid():
    """Return the pid of the supervisord-managed web process."""
    with quiet(), _virtualenv():
        # Dump the supervisord config to a temp file, ask supervisorctl
        # for the web pid, then remove the temp file.
        env.run('python manage.py supervisor getconfig > local/.tmpsupervisord.conf')
        pid = env.run('supervisorctl -c local/.tmpsupervisord.conf pid web',
                      capture=True)
        env.run('rm local/.tmpsupervisord.conf')
    return pid
def run_IReS_examples():
    # Start the IReS platform, then run each bundled example workflow.
    # quiet(): start is best-effort (may already be running).
    with quiet():
        start_IReS()
    with cd("%s/asap-platform/asap-client" % IRES_HOME):
        for eg in ("TestOperators", "TestWorkflows", "TestWorkflowsIMR"):
            # warn_only so one failing example does not abort the rest.
            with warn_only():
                run("mvn exec:java -Dexec.mainClass="
                    "\"gr.ntua.cslab.asap.examples.%s\"" % eg)
def docker_down(container_id: str) -> None:
    """
    kills the docker container
    params:
        string container: docker id of the container to stop
    """
    kill_cmd = 'docker kill %s' % container_id
    # quiet(): suppress output and tolerate an already-stopped container.
    with settings(quiet()):
        local(kill_cmd)
def home():
    """ returns string path to home directory.

    Locally: the HOME environment variable ('' when unset).
    Remotely: the output of `echo $HOME` on the host.
    """
    with quiet():
        # Bug fix: `is 'local'` compared string *identity*, which is an
        # implementation detail of interning; use `==` for equality.
        if env.env_id == 'local':
            return os.getenv('HOME', '')
        else:
            return run('echo $HOME', warn_only=True)
def docker_rm(container):
    """
    removes a docker container
    params:
        string container: docker id of the container to remove
    """
    remove_cmd = 'docker rm --force %s' % container
    # quiet(): suppress output and tolerate a missing container.
    with settings(quiet()):
        local(remove_cmd)
def ls(path):
    """
    Return the list of the files in the given directory, omitting . and ...
    """
    with cd(path), quiet():
        raw = run("for i in *; do echo $i; done")
    # Remote output uses CRLF line endings: drop CRs, one name per line.
    return raw.replace("\r", "").split("\n")
def get_currently_installed_version():
    """
    Return the currently installed version (tag) by reading the contents of
    the VERSION file, or None if the VERSION file could not be read.
    """
    # nested() (py2 contextlib) combines cd + quiet; quiet keeps a missing
    # VERSION file from aborting the task.
    with nested(cd(env.project_root), quiet()):
        version = run("cat VERSION")
        return version if version.succeeded else None
def _get_release_tag(self, release_number):
    """ Return release tag according to root and release

    Raises (via self._raise_no_valid_tag) when the tag lookup fails.
    """
    local("git fetch")
    cmd = "git tag --sort version:refname -l %s" % release_number
    with quiet():
        tag = local(cmd, capture=True)
    # Bug fix: the failure check used to sit *after* an unconditional
    # `return tag`, making it unreachable.  Check for failure first.
    if tag.failed:
        self._raise_no_valid_tag()
    return tag
def check_root():
    """Verifies that the current user is root"""
    # Try plain `id -u` first, then via sudo; exit(1) unless one of them
    # reports uid 0.  Python 2 code (print statement).
    with quiet():
        if run('id -u 2> /dev/null') != '0':
            if sudo('id -u 2> /dev/null', warn_only=True) != '0':
                print """
WARNING: you need to run this script as root or with sudo if you want to
take advantage of all the luchizz features."""
                sys.exit(1)
def ls(path):
    """
    Return the list of the files in the given directory, omitting . and ...
    """
    with cd(path), quiet():
        files = run('for i in *; do echo $i; done')
        # Remote output uses CRLF line endings; drop CRs before splitting.
        files_list = files.replace('\r', '').split('\n')
        return files_list
def file_exists(path):
    """
    Checks if the given path exists on the host and returns True if that's
    the case, False otherwise.
    """
    with quiet():
        # `test -e` exits 0 iff the path exists; .succeeded reflects that.
        return api.run('test -e {path}'.format(path=path)).succeeded
def compile():
    # Try a quiet parallel build first; on failure, rebuild loudly so the
    # compiler errors are visible.
    compiled = False
    with quiet():
        compiled = local("make clean; make -j8", capture=True).succeeded
    if not compiled:
        with settings(warn_only=True):
            # NOTE(review): without capture=True this assigns the (empty)
            # stdout string, so `compiled` stays falsy even when make
            # succeeds -- confirm whether `.succeeded` was intended here.
            compiled = local("make -j8")
    # Print compilation errors
    if not compiled:
        with color("error"):
            puts("ERROR: cannot compile code!", show_prefix=True)
def docker_login():
    # Log out of the CI registry, then log back in quietly.
    # NOTE(review): the prompt/credential mapping below appears redacted or
    # corrupted ('******') in this source; restore the real prompt answers
    # before use.
    login_prompts = {
        'Username: '******'Password: '******'docker logout {CI_REGISTRY}')
    with quiet():
        sudo(f'docker login {CI_REGISTRY}')
def docker_deploy():
    """Pull the CI image and restart the compose stack on the remote host."""
    require('ci_env_vars')
    sudo(f'docker image pull {CI_REGISTRY_IMAGE}:{CI_COMMIT_REF_NAME}')
    # Recreate the stack inside the app dir with the CI env exported.
    with quiet(), cd(env.app_dir), shell_env(**env.ci_env_vars):
        sudo('docker-compose down')
        sudo('docker-compose up -d')
def delete_user(self, username):
    """ Remove a user, given a username

    Returns True when a matching user was found (and its key-file lines
    deleted), False otherwise.
    """
    with quiet():
        matches = [user for user in self.users if user.name == username]
        if matches:
            # Bug fix: the old code formatted the match *list* (its repr)
            # into the sed pattern, which could never match a real line.
            # Delete lines containing the username instead.
            run('sed -i \'/{}/d\' {}'.format(username, self.key_file))
            return True
        return False
def disable_service(self, service_name):
    """Temporarily disable a service using systemd.

    Context manager: stops the service on entry and guarantees it is
    started again on exit, even when the managed block raises.
    """
    with self.manager_env_fabric() as fabric:
        fabric.sudo('systemctl stop {0}'.format(service_name))
    try:
        yield
    finally:
        # quiet() so the restart cannot abort/spam output during cleanup.
        with self.manager_env_fabric() as fabric, quiet():
            fabric.sudo('systemctl start {0}'.format(service_name))
def compile():
    # Try a quiet parallel build first; on failure, rebuild loudly so the
    # compiler errors are visible.
    compiled = False
    with quiet():
        compiled = local("make clean; make -j8",capture=True).succeeded
    if not compiled:
        with settings(warn_only=True):
            # NOTE(review): without capture=True this assigns the (empty)
            # stdout string, so `compiled` stays falsy even when make
            # succeeds -- confirm whether `.succeeded` was intended here.
            compiled = local("make -j8")
    # Print compilation errors
    if not compiled:
        with color("error"):
            puts("ERROR: cannot compile code!",show_prefix=True)
def build():
    """Build the docker image, preserving the previous image under an
    "old" tag for the duration of the build."""
    # quiet(): the lookup is allowed to come back empty on a fresh host.
    with quiet():
        has_previous = len(api.local(IMAGE_FIND, capture=True)) > 0
    if has_previous:
        api.local(IMAGE_TAG_OLD)
    api.local(IMAGE_BUILD)
    if has_previous:
        api.local(IMAGE_RM_OLD)
def _bootstrap_ubuntu_essential(self):
    """ install essential pkg for bootstraping """
    # Probe whether sudo works at all; install it as root when missing.
    with quiet():
        ret = sudo("ls")
        if ret.failed:
            run("apt-get install -y sudo")
    # (see
    # http://docs.saltstack.com/en/latest/topics/installation/ubuntu.html)
    sudo("apt-get update -y")
    for pkg in self.generic_bootstrap['apt']:
        sudo("apt-get install -y %s" % pkg)
def get_mac(c):
    # Return the MAC address of env.host_string via the local ARP cache,
    # or None when the host cannot be resolved.
    # NOTE(review): parameter `c` is unused here -- confirm it is required
    # by the caller's signature.
    with quiet():
        # Give the ARP cache a chance to be populated
        ping()
        result = local("arp %s" % (env.host_string,), capture=True)
        if "incomplete" in result.stdout or \
                "no entry" in result.stdout:
            return None
        elif "Unknown host" in result.stderr:
            return None
        else:
            # Relies on `arp` output layout: the MAC is the 4th
            # space-separated token -- fragile across arp variants.
            return result.stdout.split(" ")[3]
def run_with_retry(command, attempts):
    """Runs the specified command, trying up to 'attempts' times if it
    fails.  Returns the final result from fabric.api.run().

    Raises ValueError when attempts < 1 (the old code would hit an
    unbound `result` in that case).
    """
    if attempts < 1:
        raise ValueError('attempts must be >= 1')
    with quiet():
        for retries in range(attempts):
            result = run(command)
            if result.succeeded:
                break
            # Improvement: don't sleep after the final failed attempt --
            # the old code always slept, even when no retry would follow.
            if retries < attempts - 1:
                time.sleep(1)
    return result
def ls(path):
    """
    Return the list of the files in the given directory, omitting . and ...

    Arguments:
        path -- The path of the directory to get the files from
    """
    # nested() (py2 contextlib) combines cd + quiet in one with-statement.
    with nested(api.cd(path), quiet()):
        files = api.run('for i in *; do echo $i; done')
        # Remote output uses CRLF line endings; drop CRs before splitting.
        files_list = files.replace('\r', '').split('\n')
        return files_list
def _web_pid():
    """Get the pid of the web process"""
    with quiet():
        with _virtualenv():
            # Dump the supervisord config so supervisorctl can query it,
            # read the web process pid, then clean up the temp file.
            env.run(
                'python manage.py supervisor getconfig > local/.tmpsupervisord.conf'
            )
            pid = env.run(
                'supervisorctl -c local/.tmpsupervisord.conf pid web',
                capture=True)
            env.run('rm local/.tmpsupervisord.conf')
            return pid
def add_user(self, user):
    """ Add a user to a server using the given a valid user object

    Returns True when the key was appended, False when it was already
    present (matched by the user's key hash).
    """
    with quiet():
        if user.hash in self.hashes:
            # Key already installed; nothing to do.
            return False
        run('echo "{}" >> {}'.format(user.full_key, self.key_file))
        return True
def find_pkgs():
    # List installed vdsm-related rpm packages as (name, ver, rev, arch)
    # tuples, parsed from `rpm -qa` output.
    with quiet():
        out = run('rpm -qa | grep vdsm')
        res = []
        for desc in [ line.strip() for line in out.split('\n') ]:
            try:
                # e.g. 'vdsm-4.x-1.el7.x86_64' ->
                #   pkg='vdsm-4.x-1', tag='el7', arch='x86_64'
                pkg, tag, arch = desc.rsplit('.', 2)
                name, ver, rev = pkg.rsplit('-', 2)
                res.append((name, ver, rev, arch))
            except (IndexError, ValueError):
                # NOTE(review): a single malformed line discards *all*
                # accumulated results -- confirm whether `continue` was
                # intended here instead of `return []`.
                return []
        return res
def list_users():
    """
    Read the contents of a servers authorized_keys file

    e.g: fab list_users --hosts [email protected]
    """
    # quiet(): suppress fabric output while KeysFile() reads the remote file.
    with quiet():
        keyfile = KeysFile()
    print(green('\n================== {}:'.format(env.host_string)))
    for user in keyfile.users:
        print(blue('\t ' + user.name))
def setup():
    # Create the remote dirs, upload the load-test script and supervisor
    # config, restart supervisord.
    with cd(LOCUST_DIR):
        for d in DIRS:
            run('mkdir -pv {0}'.format(d))
        put('loadtest.py', 'loadtest.py')
        upload_template(sv_fname, sv_fname, context=context(), backup=False,
                        use_jinja=True)
        # quiet(): stop/shutdown fail harmlessly when supervisord is not
        # running yet.
        with quiet():
            run('bin/supervisorctl -c {0} stop all'.format(sv_conf))
            run('bin/supervisorctl -c {0} shutdown'.format(sv_conf))
            # Give the old supervisord a moment to release its socket.
            time.sleep(2)
        run('bin/supervisord -c {0}'.format(sv_conf))
def docker_login():
    # Log out of the configured registry, then log back in quietly.
    require('docker_username', 'docker_password')
    sudo(f'docker logout {env.docker_registry}')
    # NOTE(review): the prompt/credential mapping below appears redacted or
    # corrupted ('******') in this source; restore the real prompt answers
    # before use.
    login_prompts = {
        'Username: '******'Password: '******'docker logout {env.docker_registry}')
    with quiet():
        sudo(f'docker login {env.docker_registry}')
def vagrant_up(image):
    """
    runs a vagrant instance
    params:
        string image: name of the docker image
    """
    # Tear down any existing instance first; quiet() tolerates "none exists".
    with quiet():
        vagrant_destroy()
    init_cmd = 'vagrant init %s' % image
    with settings(hide('stdout')):
        local(init_cmd)
        local('vagrant box update')
        local('vagrant up')
def test_quiet_hides_all_output(self):
    """quiet() and the quiet=True kwarg must suppress all run() stdout."""
    # Sanity test - normally this is not empty
    run("ls /simple")
    ok_(sys.stdout.getvalue())
    # Reset
    sys.stdout = StringIO()
    # Real test
    with quiet():
        run("ls /simple")
    # Empty output
    ok_(not sys.stdout.getvalue())
    # Reset
    sys.stdout = StringIO()
    # Kwarg test
    run("ls /simple", quiet=True)
    ok_(not sys.stdout.getvalue())