def test_templated_prompt_builtins():
    data = query_user_data(
        {"question": {"default": "[[ now() ]]"}}, answers_data, False, envops
    )
    assert isinstance(data["question"], datetime)
    data = query_user_data(
        {"question": {"default": "[[ make_secret() ]]"}}, answers_data, False, envops
    )
    assert isinstance(data["question"], str) and len(data["question"]) == 128


def test_templated_prompt_with_answers(
    questions_data, expected_value, expected_outputs, capsys, monkeypatch
):
    monkeypatch.setattr("sys.stdin", io.StringIO("\n\n"))
    data = query_user_data(
        {**main_question, **questions_data}, answers_data, True, envops
    )
    captured = capsys.readouterr()
    data.pop("main")
    name, value = list(data.items())[0]
    assert value == expected_value
    for output in expected_outputs:
        assert output in captured.out


def test_templated_prompt(
    questions_data, expected_value, expected_outputs, capsys, monkeypatch
):
    monkeypatch.setattr("sys.stdin", io.StringIO("\n\n"))
    questions_combined = filter_config({**main_question, **questions_data})[1]
    # Fall back to each question's raw default for anything left unanswered
    data = dict(
        ChainMap(
            query_user_data(questions_combined, {}, {}, True, envops),
            {k: v["default"] for k, v in questions_combined.items()},
        )
    )
    captured = capsys.readouterr()
    data.pop("main")
    name, value = list(data.items())[0]
    assert value == expected_value
    for output in expected_outputs:
        assert output in captured.out


def test_templated_prompt_invalid():
    # assert no exception in non-strict mode
    query_user_data(
        {"question": {"default": "[[ not_valid ]]"}}, answers_data, False, envops
    )
    # assert no exception in non-strict mode
    query_user_data(
        {"question": {"help": "[[ not_valid ]]"}}, answers_data, False, envops
    )
    with pytest.raises(InvalidTypeError):
        query_user_data(
            {"question": {"type": "[[ not_valid ]]"}}, answers_data, False, envops
        )
    # assert no exception in non-strict mode
    query_user_data(
        {"question": {"choices": ["[[ not_valid ]]"]}}, answers_data, False, envops
    )