def test_init__skip_same_data(self, tmpdir, capsys):
    ctx = MockContext()
    project_dir = Path(str(tmpdir))
    project_build_dir = project_dir/"build"
    build_config = BuildConfig(cmake_generator="NINJA")
    cmake_project1 = CMakeProject(ctx, project_dir, project_build_dir, build_config)
    assert not project_build_dir.isdir()

    # -- STEP 1: First cmake_project.init
    with cd(project_dir):
        cmake_project1.init()

        # -- POSTCONDITIONS:
        cmake_build_filename = project_build_dir / CMakeProjectPersistConfig.FILE_BASENAME
        assert project_build_dir.exists(), "ENSURE: project_build_dir exists"
        assert cmake_build_filename.exists()
        captured = capsys.readouterr()
        assert_cmake_project_used_init_using_captured(cmake_project1, captured,
                                                      cmake_generator="NINJA")

    # -- STEP 2: Second cmake_project.init => SKIPPED
    with cd(project_dir):
        ctx.clear()
        cmake_project2 = CMakeProject(ctx, project_dir.relpath(),
                                      project_build_dir.relpath(), build_config)
        cmake_project2.init()
        captured = capsys.readouterr()
        assert ctx.last_command is None
        assert_cmake_project_skipped_reinit_using_captured(cmake_project1, captured)

def test_init__when_build_dir_exists_with_other_persistent_schema(self, tmpdir, capsys):
    ctx = MockContext()
    project_dir = Path(str(tmpdir))
    project_build_dir = project_dir/"build"
    build_config = BuildConfig(cmake_generator="ninja", cmake_build_type="debug")
    assert not project_build_dir.isdir()

    # -- STEP 1: First cmake_project.init
    with cd(project_dir):
        cmake_project1 = CMakeProject(ctx, project_dir, project_build_dir, build_config)
        cmake_project1.init()

        cmake_build_filename = project_build_dir / CMakeProjectPersistConfig.FILE_BASENAME
        assert project_build_dir.exists(), "ENSURE: project_build_dir exists"
        assert cmake_build_filename.exists()

    # -- STEP: Fake cmake-build init with other persistent data schema.
    # HINT: May occur when cmake-build is updated, but project_build_dir still exists.
    with open(cmake_build_filename, "w") as f:
        f.write("""{
            "other": 123,
            "cmake_generator": "ninja"
        }""")

    # -- STEP 2: Second try to cmake_project.init()
    # ENSURE: No failure / AssertionError occurs
    with cd(project_dir):
        cmake_project2 = CMakeProject(ctx, project_dir, project_build_dir, build_config)
        assert cmake_project2.initialized
        assert not cmake_project2.needs_reinit()
        assert cmake_project2.needs_update()

        cmake_project2.init()
        assert not cmake_project2.needs_reinit()
        assert not cmake_project2.needs_update()
        captured = capsys.readouterr()
        assert_cmake_project_needed_update_using_captured(cmake_project2, captured,
                                                          cmake_generator="ninja")

def may_give_mixed_value_types(self, remote):
    remote.expect_sessions(
        Session("host1", user="******", cmd="nope"),
        Session("host2", cmd="nope"),
    )
    with cd(support):
        _run_fab("hosts-are-mixed-values")

def test_remove_raises_oserror(self, tmp_path, monkeypatch, capsys):
    def mock_remove(p):
        raise OSError("MOCK_REMOVE: %s" % p)

    setup_workdir(tmp_path, [
        "foo/one.xxx",
        "more/two.xxx",
    ])
    problematic_file1 = tmp_path / "foo/one.xxx"
    problematic_file1 = problematic_file1.relative_to(tmp_path)
    problematic_file2 = tmp_path / "more/two.xxx"
    problematic_file2 = problematic_file2.relative_to(tmp_path)

    with cd(str(tmp_path)):
        monkeypatch.setattr("path.Path.remove_p", mock_remove)
        cleanup_files(["**/*.xxx"])

        captured = capsys.readouterr()
        print(captured.out)
        expected1 = "REMOVE: %s" % problematic_file1
        expected2 = "OSError: MOCK_REMOVE: %s" % problematic_file1
        assert expected1 in captured.out
        assert expected2 in captured.out
        expected2 = "OSError: MOCK_REMOVE: %s" % problematic_file2
        assert expected2 in captured.out

def per_project_config_files_are_loaded_before_task_parsing(self):
    # Relies on auto_dash_names being loaded at project-conf level;
    # fixes #467; when bug present, project conf is loaded _after_
    # attempt to parse tasks, causing explosion when i_have_underscores
    # is only sent to parser as i-have-underscores.
    with cd(os.path.join('configs', 'underscores')):
        expect("i_have_underscores")

def comma_separated_string_is_multiple_hosts(self, remote):
    remote.expect_sessions(
        Session('host1', cmd='nope'),
        Session('host2', cmd='nope'),
    )
    with cd(support):
        _run_fab("-H host1,host2 basic-run")

def dash_i_supplies_default_connect_kwarg_key_filename(self):
    # NOTE: the expect-identity task in tests/_support/fabfile.py
    # performs asserts about its context's .connect_kwargs value,
    # relying on other tests to prove connect_kwargs makes its way into
    # that context.
    with cd(support):
        _run_fab("-i identity.key expect-identity")

def basic_pre_and_post_tasks_still_work(self):
    with cd(support):
        # Sanity
        expect("first", "First!\n")
        expect("third", "Third!\n")
        # Real test
        expect("second", "First!\nSecond!\nThird!\n")

def test_build__auto_init_with_nonexisting_build_dir(self, tmpdir, capsys):
    ctx = MockContext()
    project_dir = Path(str(tmpdir))
    project_build_dir = project_dir/"build"
    build_config = BuildConfig(cmake_generator="ninja")
    cmake_project1 = CMakeProject(ctx, project_dir, project_build_dir, build_config)
    assert not project_build_dir.isdir()

    # -- STEP: First cmake_project.build => AUTO CMAKE-INIT: project_build_dir
    with cd(project_dir):
        assert not project_build_dir.exists()
        cmake_project1.build()

        # -- POSTCONDITIONS:
        expected_commands = [
            "cmake -G Ninja ..",
            "cmake --build .",
        ]
        assert ctx.commands == expected_commands
        cmake_build_filename = project_build_dir / CMakeProjectPersistConfig.FILE_BASENAME
        captured = capsys.readouterr()
        assert_cmake_project_used_init_using_captured(cmake_project1, captured,
                                                      cmake_generator="ninja")
        # assert "CMAKE-INIT: build (using cmake.generator=ninja)" in captured.out
        assert "CMAKE-BUILD: build" in captured.out
        assert project_build_dir.exists()
        assert cmake_build_filename.exists()

def does_not_seek_tasks_module_if_namespace_was_given(self):
    with cd('implicit'):
        expect(
            'foo',
            err="No idea what 'foo' is!\n",
            program=Program(namespace=Collection('blank'))
        )

def mezzo(ctx):
    ctx.run("mkdir -p build/jschema")
    ctx.run("cp -R jschema setup.py build/jschema")
    with cd("build"):
        ctx.run("tar cfz jschema.tar.gz jschema")
        ctx.run("mv jschema.tar.gz /opt/mezzo/dependencies")
        ctx.run("rm -rf jschema")

def may_give_Connection_init_kwarg_dicts(self, remote):
    remote.expect_sessions(
        Session("host1", user="******", cmd="nope"),
        Session("host2", cmd="nope"),
    )
    with cd(support):
        _run_fab("hosts-are-init-kwargs")

def single_string_is_single_host_and_single_exec(self, remote):
    remote.expect(host="myhost", cmd="nope")
    # In addition to just testing a base case, this checks for a really
    # dumb bug where one appends to, instead of replacing, the task
    # list during parameterization/expansion XD
    with cd(support):
        _run_fab("-H myhost basic-run")

def clean(self, args=None, options=None, init_args=None, config=None):
    """Clean the build artifacts (but: preserve CMake init)"""
    project_build_dir = posixpath_normpath(self.project_build_dir.relpath())
    if not self.initialized:
        print("CMAKE-CLEAN: {0} (SKIPPED: not initialized yet)".format(
            project_build_dir))
        return

    # -- ALTERNATIVE: self.build(args="clean", ensure_init=False)
    self.ensure_init(args=init_args)
    print("CMAKE-CLEAN: {0}".format(project_build_dir))
    cmake_clean_args = "clean"
    if args:
        clean_args = make_args_string(args)
        cmake_clean_args = "clean {0}".format(clean_args)

    options = options or []
    if config:
        options.append("--config {0}".format(config))
    cmake_options = " ".join(options)
    # pylint: disable=line-too-long
    with cd(self.project_build_dir):
        self.ctx.run("cmake --build . {0} -- {1}".format(
            cmake_options, cmake_clean_args))

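# -- NOTE (illustrative sketch, not part of the project sources): the command string
#    that clean() above composes, assuming hypothetical inputs config="Release" and
#    args=["--target", "foo"], and assuming make_args_string() simply joins its
#    arguments with spaces.
def _example_clean_command():
    args = ["--target", "foo"]      # hypothetical clean args
    config = "Release"              # hypothetical build config
    cmake_clean_args = "clean {0}".format(" ".join(args))
    options = ["--config {0}".format(config)]
    command = "cmake --build . {0} -- {1}".format(" ".join(options), cmake_clean_args)
    return command  # -> "cmake --build . --config Release -- clean --target foo"
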
def configure(self, **data):
    """Update CMake project build directory configuration"""
    project_build_dir = posixpath_normpath(self.project_build_dir.relpath())
    if not self.initialized:
        print("CMAKE-UPDATE: {0} (SKIPPED: Not initialized yet)".format(
            project_build_dir))
        return

    for name, value in data.items():
        self.config.cmake_defines[name] = value

    # cmake_generator = data.pop("cmake_generator", None)
    # self.ensure_init(cmake_generator=cmake_generator)
    print("CMAKE-CONFIGURE: {0}".format(project_build_dir))
    # more_cmake_defines = OrderedDict(data.items())
    # cmake_options = cmake_cmdline_define_options([], **data)
    # print("XXX cmake_defines: %r" % self.config.cmake_defines)
    # pylint: disable=line-too-long
    cmake_options = self.make_cmake_configure_options(**data)
    with cd(self.project_build_dir):
        relpath_to_project_dir = self.project_build_dir.relpathto(self.project_dir)
        relpath_to_project_dir = posixpath_normpath(relpath_to_project_dir)
        self.ctx.run("cmake {0} {1}".format(cmake_options, relpath_to_project_dir))
        # -- FINALLY: If cmake-init worked, store used cmake_generator.
        self.store_config()

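# -- NOTE (illustrative sketch, not part of the project sources): the kind of command
#    that configure() above runs, assuming (hypothetically) that
#    make_cmake_configure_options() renders each keyword as a "-D<NAME>=<VALUE>" option
#    and that the build dir is "<project>/build", so the relative path back to the
#    project dir is "..".
def _example_configure_command():
    data = {"BUILD_TESTING": "ON", "CMAKE_BUILD_TYPE": "Release"}   # hypothetical defines
    cmake_options = " ".join("-D{0}={1}".format(name, value)
                             for name, value in sorted(data.items()))
    command = "cmake {0} {1}".format(cmake_options, "..")
    return command  # -> "cmake -DBUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Release .."
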
def docs():
    """Build html docs

    :return:
    """
    with cd('docs'):
        run('make html')

def tasks_dedupe_honors_configuration(self):
    # Kinda-sorta duplicates some tests in executor.py, but eh.
    with cd("configs"):
        # Runtime conf file
        expect(
            "-c integration -f no-dedupe.yaml biz",
            out="""
foo
foo
bar
biz
post1
post2
post2
""".lstrip(),
        )
        # Flag beats runtime
        expect(
            "-c integration -f dedupe.yaml --no-dedupe biz",
            out="""
foo
foo
bar
biz
post1
post2
post2
""".lstrip(),
        )

def update_gherkin(ctx, dry_run=False):
    """Update "gherkin-languages.json" file from cucumber-repo.

    * Download "gherkin-languages.json" from cucumber repo
    * Update "gherkin-languages.json"
    * Generate "i18n.py" file from "gherkin-languages.json"
    * Update "behave/i18n.py" file (optional; not in dry-run mode)
    """
    with cd("etc/gherkin"):
        # -- BACKUP-FILE:
        gherkin_languages_file = Path("gherkin-languages.json")
        gherkin_languages_file.copy("gherkin-languages.json.SAVED")

        print('Downloading "gherkin-languages.json" from github:cucumber ...')
        download_request = requests.get(GHERKIN_LANGUAGES_URL)
        assert download_request.ok
        print('Download finished: OK (size={0})'.format(len(download_request.content)))
        with open(gherkin_languages_file, "wb") as f:
            f.write(download_request.content)

        print('Generating "i18n.py" ...')
        ctx.run("./convert_gherkin-languages.py")
        ctx.run("diff i18n.py ../../behave/i18n.py")
        if not dry_run:
            print("Updating behave/i18n.py ...")
            Path("i18n.py").move("../../behave/i18n.py")

def loads_fabfile_not_tasks(self):
    "Loads fabfile.py, not tasks.py"
    with cd(support):
        expect(
            "--list",
            """
Available tasks:

  basic-run
  build
  deploy
  expect-from-env
  expect-identities
  expect-identity
  expect-mutation
  expect-mutation-to-fail
  expect-vanilla-Context
  first
  hosts-are-host-stringlike
  hosts-are-init-kwargs
  hosts-are-mixed-values
  hosts-are-myhost
  mutate
  second
  third
  two-hosts

""".lstrip(),
        )

def per_project_config_files_are_loaded_before_task_parsing(self):
    # Relies on auto_dash_names being loaded at project-conf level;
    # fixes #467; when bug present, project conf is loaded _after_
    # attempt to parse tasks, causing explosion when i_have_underscores
    # is only sent to parser as i-have-underscores.
    with cd(os.path.join("configs", "underscores")):
        expect("i_have_underscores")

def test_cleanup_dirs_without_configfile(self, tmp_path):
    # -- SETUP:
    setup_workdir(tmp_path, [
        ".venv_DEFAULT/.dir",
        "downloads/.dir",
    ])
    my_dir1 = tmp_path / ".venv_DEFAULT"
    my_dir2 = tmp_path / "downloads"
    assert my_dir1.exists() and my_dir1.is_dir()
    assert my_dir2.exists() and my_dir2.is_dir()

    tasks_dir = tmp_path / "tasks.py"
    tasks_dir.write_text(TASKS_FILE_TEXT_USING_CLEANUP_MODULE_ONLY)
    config_dir = tmp_path / "invoke.yaml"
    assert not config_dir.exists()

    # -- EXECUTE AND VERIFY:
    with use_subprocess_coverage(tmp_path):
        with cd(str(tmp_path)):
            output = ensure_text(run_with_output("invoke cleanup.all"))
            assert not my_dir1.exists()
            assert not my_dir2.exists()

            expected1 = "RMTREE: .venv_DEFAULT"
            expected2 = "RMTREE: downloads"
            assert expected1 in output
            assert expected2 in output

def test_invoke_calls_other_cleanup_task(self, tmp_path):
    tasks_file = tmp_path / "tasks.py"
    tasks_file.write_text(u"""
from __future__ import absolute_import, print_function
from invoke import task, Collection
import invoke_cleanup as cleanup

@task
def foo_clean(ctx):
    print("CALLED: foo_clean")

namespace = Collection(foo_clean)
namespace.add_collection(Collection.from_module(cleanup), name="cleanup")
namespace.configure(cleanup.namespace.configuration())

from invoke_cleanup import cleanup_all_tasks
cleanup_all_tasks.add_task(foo_clean, name="foo_clean")
cleanup_all_tasks.configure(namespace.configuration())
""")

    # -- EXECUTE AND VERIFY:
    with use_subprocess_coverage(tmp_path):
        with cd(str(tmp_path)):
            output = ensure_text(run_with_output("invoke cleanup.all"))
            expected1 = "CLEANUP TASK: foo-clean"
            expected2 = "CALLED: foo_clean"
            assert expected1 in output
            assert expected2 in output

def tasks_dedupe_honors_configuration(self):
    # Kinda-sorta duplicates some tests in executor.py, but eh.
    with cd('configs'):
        # Runtime conf file
        expect(
            "-c integration -f no-dedupe.yaml biz",
            out="""
foo
foo
bar
biz
post1
post2
post2
""".lstrip())
        # Flag beats runtime
        expect(
            "-c integration -f dedupe.yaml --no-dedupe biz",
            out="""
foo
foo
bar
biz
post1
post2
post2
""".lstrip())

def test_with_configfile_and_cleanup_files_overrides_default(self, tmp_path):
    # -- SETUP:
    setup_workdir(tmp_path, [
        "one.xxx",
        "more/two.zzz",
    ])
    my_file1 = tmp_path / "one.xxx"
    my_file2 = tmp_path / "more/two.zzz"
    assert my_file1.exists() and my_file1.is_file()
    assert my_file2.exists() and my_file2.is_file()

    tasks_file = tmp_path / "tasks.py"
    tasks_file.write_text(TASKS_FILE_TEXT_USING_CLEANUP_MODULE_ONLY)
    config_file = tmp_path / "invoke.yaml"
    config_file.write_text(u"""
cleanup_all:
    files:
        - "**/*.xxx"
        - "**/*.zzz"
""")

    # -- EXECUTE AND VERIFY:
    with use_subprocess_coverage(tmp_path):
        with cd(str(tmp_path)):
            output = ensure_text(run_with_output("invoke cleanup.all"))
            assert not my_file1.exists()
            assert not my_file2.exists()

            expected1 = "REMOVE: one.xxx"
            expected2 = "REMOVE: more/two.zzz"
            assert expected1 in output
            assert expected2 in output

def key_filename_can_be_set_via_non_override_config_levels(self):
    # Proves/protects against #1762, where eg key_filenames gets
    # 'reset' to an empty list. Arbitrarily uses the 'yml' level of
    # test fixtures, which has a fabric.yml w/ a
    # connect_kwargs.key_filename value of [private.key, other.key].
    with cd(os.path.join(support, "yml_conf")):
        program.run("fab expect-conf-key-filename")

def pipsync(ctx):
    """
    Upgrade the virtual environment to contain the dependencies and versions
    exactly as defined in requirements.txt
    """
    with cd(project_root):
        ctx.run('pip-sync requirements.txt', pty=True, echo=True)

def single_string_is_single_host_and_single_exec(self, remote):
    remote.expect(host='myhost', cmd='nope')
    # In addition to just testing a base case, this checks for a really
    # dumb bug where one appends to, instead of replacing, the task
    # list during parameterization/expansion XD
    with cd(support):
        _run_fab("-H myhost basic-run")

def do(ctx, cmd, dry_run=None, **kwargs):
    run_env = kwargs.pop('env', {})
    path = kwargs.pop('path', None)
    if path:
        path = os.path.abspath(os.path.expandvars(os.path.expanduser(path)))
        if not os.path.isdir(path):
            raise NotADirectoryError(f'{path}')
    if dry_run is None:
        dry_run = env.dry_run
    if dry_run:
        cmd_str = []
        if run_env:
            env_vars = 'export ' + ' '.join(f'{k}={v}' for k, v in run_env.items())
            cmd_str.append(env_vars)
        if path:
            cmd_str.append(f'cd {path}')
        cmd_str.append(cmd)
        print(' && '.join(cmd_str))
    else:
        if not path:
            return run(cmd, env=run_env, **kwargs)
        else:
            with cd(path):
                return run(cmd, env=run_env, **kwargs)

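# -- NOTE (illustrative sketch, not part of the original module): what the dry-run
#    branch of do() above prints for the hypothetical inputs env={"STAGE": "prod"},
#    path="/srv/app", cmd="make deploy".
def _example_do_dry_run_output():
    run_env = {"STAGE": "prod"}     # hypothetical environment
    path = "/srv/app"               # hypothetical working directory
    cmd = "make deploy"             # hypothetical command
    cmd_str = []
    if run_env:
        cmd_str.append('export ' + ' '.join(f'{k}={v}' for k, v in run_env.items()))
    if path:
        cmd_str.append(f'cd {path}')
    cmd_str.append(cmd)
    return ' && '.join(cmd_str)     # -> "export STAGE=prod && cd /srv/app && make deploy"
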
def test_ctest(self, tmpdir, config):
    cmake_project = self.make_initialized_cmake_project(
        tmpdir, cmake_generator="ninja")
    with cd(cmake_project.project_dir):
        cmake_project.test(config=config)

        expected = "ctest -C {0}".format(config)
        assert cmake_project.ctx.last_command == expected

def cf_upload(c):
    """Publish to Rackspace Cloud Files"""
    rebuild(c)
    with cd(CONFIG['deploy_path']):
        c.run('swift -v -A https://auth.api.rackspacecloud.com/v1.0 '
              '-U {cloudfiles_username} '
              '-K {cloudfiles_api_key} '
              'upload -c {cloudfiles_container} .'.format(**CONFIG))

def create_test_app():
    """Create a test app structure

    :return:
    """
    mkdir(path='tests')
    with cd('tests'):
        run('django-admin.exe startproject config .')

def docs(ctx):
    """Build html docs"""
    run('sphinx-apidoc -f -o docs/modules dstack_factory')
    with cd('docs'):
        run('make html')

def cli_option_wins_over_env(self, reset_environ):
    # Set env var to load the JSON config instead of the YAML one,
    # which contains a "json" string internally.
    os.environ["INVOKE_RUNTIME_CONFIG"] = "json/invoke.json"
    with cd("configs"):
        # But run the default test task, which expects a "yaml"
        # string. If the env var won, this would explode.
        expect("-c runtime -f yaml/invoke.yaml mytask")

def test_clean(self, tmpdir, config):
    cmake_project = self.make_initialized_cmake_project(
        tmpdir, cmake_generator="ninja")
    with cd(cmake_project.project_dir):
        cmake_project.clean(config=config)

        expected = "cmake --build . --config {0} -- clean".format(config)
        assert cmake_project.ctx.last_command == expected

def executor_is_given_access_to_core_args_and_remainder(self):
    klass = Mock()
    with cd('implicit'):
        cmd = "myapp -e foo -- myremainder"
        Program(executor_class=klass).run(cmd, exit=False)
        core = klass.call_args[0][2]
        eq_(core[0].args['echo'].value, True)
        eq_(core.remainder, "myremainder")

def _expect_prompt(self, getpass, flag, key, value, prompt):
    getpass.return_value = value
    with cd(support):
        # Expect that the given key was found in the context.
        cmd = "-c prompting {} expect-connect-kwarg --key {} --val {}"
        _run_fab(cmd.format(flag, key, value))
        # Then we also expect that getpass was called w/ expected prompt
        getpass.assert_called_once_with(prompt)

def invoke_deploy_task(config_name, app, task):
    with cd(app):
        try:
            run('inv --config ' + config_name + ' ' + task, hide='both', pty=True)
        except Failure as failure:
            cprint('{app}: task "{task}" failed'.format_map(locals()), 'red')
            cprint('Error output is:', 'red')
            print(failure.result.stdout, end='')
            raise

def build(ctx, builder="html", options=""):
    """Build docs with sphinx-build"""
    sourcedir = ctx.config.sphinx.sourcedir
    destdir = Path(ctx.config.sphinx.destdir or "build")/builder
    destdir = destdir.abspath()
    with cd(sourcedir):
        destdir_relative = Path(".").relpathto(destdir)
        command = "sphinx-build {opts} -b {builder} {sourcedir} {destdir}" \
            .format(builder=builder, sourcedir=".", destdir=destdir_relative,
                    opts=options)
        ctx.run(command)

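# -- NOTE (illustrative sketch, not part of the original tasks file): the command string
#    that build() above composes, assuming hypothetical config values
#    sphinx.sourcedir="docs" and sphinx.destdir="build" with builder="html" and
#    options="-W"; the relative destdir then resolves to something like "../build/html".
def _example_sphinx_build_command():
    builder = "html"
    options = "-W"
    destdir_relative = "../build/html"   # hypothetical result of Path(".").relpathto(destdir)
    command = "sphinx-build {opts} -b {builder} {sourcedir} {destdir}".format(
        builder=builder, sourcedir=".", destdir=destdir_relative, opts=options)
    return command  # -> "sphinx-build -W -b html . ../build/html"
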
def _run(
    self,
    flag='-S',
    file_='ssh_config/runtime.conf',
    tasks='runtime-ssh-config',
):
    with cd(support):
        # Relies on asserts within the task, which will bubble up as
        # it's executed in-process
        cmd = "-c runtime_fabfile {} {} -H runtime {}"
        _run_fab(cmd.format(flag, file_, tasks))

def run_tests(ctx, test_module='pannier', opts='', pty=False):
    print("Cleaning out pycs")
    ctx.run('find . -type f -name \*.pyc -delete')
    with util.cd(os.path.join(LOCAL_ROOT, 'pannier_project')):
        ctx.run(
            'TESTS=true coverage run --source=pannier manage.py test {} {}'.format(
                test_module, opts
            ),
            pty=pty
        )
        ctx.run('coverage xml')

def pre_post_tasks_are_not_parameterized_across_hosts(self):
    with cd(support):
        _run_fab("-H hostA,hostB,hostC second --show-host")
        output = sys.stdout.getvalue()
        # Expect pre once, 3x main, post once, as opposed to e.g. both
        # pre and main task
        expected = """
First!
Second: hostA
Second: hostB
Second: hostC
Third!
""".lstrip()
        assert output == expected

def build(ctx, builder="html", language=None, options=""):
    """Build docs with sphinx-build"""
    language = _sphinxdoc_get_language(ctx, language)
    sourcedir = ctx.config.sphinx.sourcedir
    destdir = _sphinxdoc_get_destdir(ctx, builder, language=language)
    destdir = destdir.abspath()
    with cd(sourcedir):
        destdir_relative = Path(".").relpathto(destdir)
        command = "sphinx-build {opts} -b {builder} -D language={language} {sourcedir} {destdir}" \
            .format(builder=builder, sourcedir=".", destdir=destdir_relative,
                    language=language, opts=options)
        ctx.run(command)

def update(ctx):
    """
    Update applications/* from git origin.
    """
    print_bold('Updating qabel-infrastructure')
    run('git pull --ff-only')

    for app in APPS:
        papp = Path(app)
        if not papp.exists():
            print_bold('Cloning', app)
            run('git clone https://github.com/Qabel/qabel-{name} {path}'.format(
                name=papp.name, path=papp))
            continue  # no need to pull if we just cloned
        with cd(app):
            print_bold('Updating', app)
            run('git pull --ff-only')

def publish(c):
    """Publish to production"""
    update_repo()
    clean(c)
    with alter_template() as _:
        local('pelican -s publishconf.py')

    search_path = pathlib.Path(CONFIG['deploy_path'])/'tipuesearch_content.js'
    search_path_fix = pathlib.Path(CONFIG['deploy_path'])/'tipuesearch_content.json'
    if os.path.isfile(search_path):
        shutil.move(search_path, search_path_fix)

    # Detect if in local machine or in travis-ci
    if os.environ.get('TRAVIS', 'false') != 'true':
        with cd(f"{CONFIG['deploy_path']}"):
            local('git checkout master')
            local('git add --all')
            local(f'''git commit -m "{CONFIG['commit_message']}"''')
            local('git push -u github master --quiet')
            local('python ./utils/gitalk.py')

def deploy():
    """
    Based on https://gist.github.com/domenic/ec8b0fc8ab45f39403dd
    """
    run("rm -rf ./site/")
    run("mkdocs build")
    with util.cd("./site/"):
        run("git init")
        run('echo ".*pyc" > .gitignore')
        run('git config user.name "Travis CI"')
        run('git config user.email "%s"' % os.environ["EMAIL"])
        run("git add .")
        run('git commit -m "Deploy to GitHub Pages"')
        run(
            'git push --force --quiet "https://{GH_TOKEN}@{GH_REF}" '
            'master:gh-pages > /dev/null 2>&1'.format(
                GH_TOKEN=os.environ["GH_TOKEN"], GH_REF=os.environ["GH_REF"]
            )
        )

def deploy_docs():
    """
    Based on https://gist.github.com/domenic/ec8b0fc8ab45f39403dd
    """
    run('rm -rf ./site/')
    build_docs()
    with util.cd('./site/'):
        run('git init')
        run('echo ".*pyc" > .gitignore')
        run('git config user.name "Travis CI"')
        run('git config user.email "%s"' % os.environ['EMAIL'])
        run('git add .')
        run('git commit -m "Deploy to GitHub Pages"')
        run(
            'git push --force --quiet "https://{GH_TOKEN}@{GH_REF}" '
            'master:gh-pages > /dev/null 2>&1'.format(
                GH_TOKEN=os.environ['GH_TOKEN'],
                GH_REF=os.environ['GH_REF'],
            )
        )

def loads_fabfile_not_tasks(self):
    "Loads fabfile.py, not tasks.py"
    with cd(support):
        expect(
            "--list",
            """
Available tasks:

  basic-run
  build
  deploy
  expect-from-env
  expect-identities
  expect-identity
  expect-mutation
  expect-mutation-to-fail
  expect-vanilla-Context
  first
  mutate
  second
  third

""".lstrip())

def seeks_and_loads_tasks_module_by_default(self):
    with cd('implicit'):
        expect('foo', out="Hm\n")

def core_help_doesnt_get_mad_if_loading_fails(self):
    # Expects no tasks.py in root of FS
    with cd(ROOT):
        expect("--help", out="Usage: ", test=assert_contains)

def per_project_config_files_are_loaded(self):
    with cd(os.path.join('configs', 'yaml')):
        expect("mytask")

def per_project_config_files_load_with_explicit_ns(self):
    # Re: #234
    with cd(os.path.join('configs', 'yaml')):
        expect("-c explicit mytask")

def uses_executor_class_given(self):
    klass = Mock()
    with cd('implicit'):
        Program(executor_class=klass).run("myapp foo", exit=False)
        klass.assert_called_with(ANY, ANY, ANY)
        klass.return_value.execute.assert_called_with(ANY)

def runtime_config_file_honored(self):
    with cd('configs'):
        expect("-c runtime -f yaml/invoke.yaml mytask")