def test_project_md():
    """Make sure that project.md generates a file that passes pytest."""
    simulator_status = verify.one_example(
        "phmdoctest project.md --outfile discarded.py",
        want_file_name=None,
        pytest_options=["--strict", "--doctest-modules", "-v"],
    )
    assert simulator_status.runner_status.exit_code == 0
    assert simulator_status.pytest_exit_code == 0


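# The tests in this file use two module-level helpers defined in the
# file's preamble (not shown in this section). A minimal sketch, assuming
# phmdoctest's public phmdoctest.tool API; the real preamble may differ:
#
#     import phmdoctest.tool
#     import verify  # this repo's test helper module
#
#     # Fetches labeled fenced code blocks from README.md.
#     labeled = phmdoctest.tool.FCBChooser("README.md")

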
def test_inline_example():
    """Make sure the generated --outfile is the same as the copy on disk."""
    directive_command = labeled.contents(label="inline-outfile")
    _ = verify.one_example(
        directive_command,
        want_file_name="doc/test_inline_example.py",
        pytest_options=None,
    )


def test_outfile_to_stdout():
    """Make sure generated --outfile and --report are as expected."""
    outfile_command1 = labeled.contents(label="outfile-dash1")
    outfile_command2 = labeled.contents(label="outfile-dash2")
    simulator_status = verify.one_example(
        outfile_command1, want_file_name=None, pytest_options=None
    )
    with open("doc/test_example2.py", "r", encoding="utf-8") as fp:
        want = fp.read()
    got1 = simulator_status.runner_status.stdout
    verify.a_and_b_are_the_same(want, got1)

    simulator_status = verify.one_example(
        outfile_command2, want_file_name=None, pytest_options=None
    )
    got2 = simulator_status.runner_status.stdout
    verify.a_and_b_are_the_same(want, got2)


def test_setup_report_example():
    """Make sure the report in README.md is correct."""
    command = labeled.contents(label="setup-command-report")
    want = labeled.contents(label="setup-report")
    simulator_status = verify.one_example(
        command, want_file_name=None, pytest_options=None
    )
    got1 = simulator_status.runner_status.stdout
    verify.a_and_b_are_the_same(want, got1)


def test_pytest_session_fails():
    """Make sure pytest fails due to incorrect session output in the .md file.

    Generate a test file that fails pytest.
    """
    simulator_status = verify.one_example(
        "phmdoctest tests/bad_session_output.md --outfile discarded.py",
        want_file_name=None,
        pytest_options=["--strict", "--doctest-modules", "-v"],
    )
    assert simulator_status.pytest_exit_code == 1


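# A sketch of the kind of Markdown session block that makes the generated
# test file fail under --doctest-modules. This is illustrative only; the
# actual contents of tests/bad_session_output.md may differ. The session's
# expected output is deliberately wrong:
#
#     ```py
#     >>> 1 + 1
#     3
#     ```

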
def test_skip_example():
    """Make sure generated --outfile and --report are as expected."""
    skip_command = labeled.contents(label="skip-command")
    want = labeled.contents(label="skip-report")
    short_form_command = labeled.contents(label="short-skip-command")
    simulator_status = verify.one_example(
        skip_command,
        want_file_name="doc/test_example2.py",
        pytest_options=None,
    )
    got1 = simulator_status.runner_status.stdout
    verify.a_and_b_are_the_same(want, got1)

    # Test the short -s form of the --skip option.
    simulator_status = verify.one_example(
        short_form_command,
        want_file_name="doc/test_example2.py",
        pytest_options=None,
    )
    got2 = simulator_status.runner_status.stdout
    verify.a_and_b_are_the_same(want, got2)


def test_directive3_example():
    """Make sure generated --outfile and --report are as expected."""
    # Note that report_command is hard-coded here.
    # The command shown in README.md is not tested.
    report_command = "phmdoctest doc/directive3.md --report"
    directive_command = labeled.contents(label="directive-3-outfile")
    _ = verify.one_example(
        directive_command,
        want_file_name="doc/test_directive3.py",
        pytest_options=None,
    )
    with open("doc/directive3_report.txt", "r", encoding="utf-8") as f:
        want = f.read()
    simulator_status = verify.one_example(
        report_command, want_file_name=None, pytest_options=None
    )
    got = simulator_status.runner_status.stdout
    verify.a_and_b_are_the_same(want, got)


def test_setup_with_inline():
    """Do inline annotations in setup and teardown blocks."""
    command = (
        "phmdoctest tests/setup_with_inline.md"
        " -u FIRST -d LAST --outfile discarded.py"
    )
    simulator_status = verify.one_example(
        well_formed_command=command,
        want_file_name="tests/test_setup_with_inline.py",
        pytest_options=["--doctest-modules", "-v"],
    )
    assert simulator_status.runner_status.exit_code == 0


def test_no_markdown_fenced_code_blocks():
    """Show --report works when there is nothing to report."""
    simulator_status = verify.one_example(
        "phmdoctest tests/no_fenced_code_blocks.md"
        " --report --outfile discarded.py",
        want_file_name=None,
        pytest_options=["--doctest-modules", "-v"],
    )
    assert simulator_status.runner_status.exit_code == 0
    assert simulator_status.pytest_exit_code == 0
    stdout = simulator_status.runner_status.stdout
    assert "0 test cases." in stdout


def test_example2_report():
    """Check example2_report.txt used in .travis.yml."""
    simulator_status = verify.one_example(
        'phmdoctest doc/example2.md --skip "Python 3.7" --skip LAST --report'
        " --outfile discarded.py",
        want_file_name=None,
        pytest_options=None,
    )
    assert simulator_status.runner_status.exit_code == 0
    stdout = simulator_status.runner_status.stdout
    with open("tests/example2_report.txt", "r", encoding="utf-8") as f:
        want = f.read()
    verify.a_and_b_are_the_same(a=want, b=stdout)


def test_empty_code_block_report():
    """An empty code block and its associated output block are deleted."""
    simulator_status = verify.one_example(
        "phmdoctest tests/empty_code_block.md"
        " --report --outfile discarded.py",
        want_file_name=None,
        pytest_options=["--doctest-modules", "-v"],
    )
    assert simulator_status.runner_status.exit_code == 0
    assert simulator_status.pytest_exit_code == 0
    stdout = simulator_status.runner_status.stdout
    with open("tests/empty_code_report.txt", "r", encoding="utf-8") as f:
        want = f.read()
    verify.a_and_b_are_the_same(a=want, b=stdout)


def test_example1():
    """Make sure generated --outfile is as expected; run pytest.

    Check the copy of test_example1.py in the fenced code block.
    """
    # The helper checks the generated --outfile against the disk file.
    example1_command = labeled.contents(label="example1-command")
    want = labeled.contents(label="example1-outfile")
    _ = verify.one_example(
        example1_command,
        want_file_name="doc/test_example1.py",
        pytest_options=None,
    )

    # Make sure the copy of test_example1.py in README.md
    # is the same as the disk file.
    with open("doc/test_example1.py", "r", encoding="utf-8") as fp:
        got = fp.read()
    verify.a_and_b_are_the_same(want, got)

    # Run again and call pytest to make sure the file works with pytest.
    simulator_status = verify.one_example(
        example1_command,
        want_file_name=None,
        pytest_options=["--doctest-modules", "-v"],
    )
    assert simulator_status.pytest_exit_code == 0


def test_directive_example():
    """Make sure generated --outfile is as expected; run pytest.

    Check the --outfile against the copy in the fenced code block.
    """
    directive_command = labeled.contents(label="directive-example-command")
    want = labeled.contents(label="directive-example-outfile")
    simulator_status = verify.one_example(
        directive_command,
        want_file_name=None,
        pytest_options=["--doctest-modules", "-v"],
    )
    # The fenced code block in README.md is the same as the --outfile.
    got = simulator_status.outfile
    verify.a_and_b_are_the_same(want, got)
    assert simulator_status.pytest_exit_code == 0


def test_managenamespace_outfile():
    """Show that managenamespace.md generates test_managenamespace.py."""
    # Generate an outfile from tests/managenamespace.md and
    # compare it to the test suite file tests/test_managenamespace.py.
    #
    # When pytest runs tests/test_managenamespace.py the
    # fixture managenamespace is imported and update() is called.
    # The line _ = additions.pop('sys', None) is called.
    #
    # The combination of import sys at the top of the test file
    # and import sys in the example code is needed to test
    # that line of code.
    command = "phmdoctest tests/managenamespace.md --outfile discarded.py"
    _ = verify.one_example(
        command,
        want_file_name="tests/test_managenamespace.py",
        pytest_options=None,
    )


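# Illustrative sketch (not phmdoctest's actual generated code) of the
# namespace-update behavior exercised above: the managenamespace fixture
# collects new names from the example's namespace and pops module objects
# such as sys so they are not carried between tests.
#
#     import sys
#     additions = {"answer": 42, "sys": sys}
#     _ = additions.pop("sys", None)
#     assert additions == {"answer": 42}

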
def test_no_fails_junit_xml():
    """Generate JUnit XML from pytest with no failures."""
    simulator_status = verify.one_example(
        "phmdoctest project.md --outfile discarded.py",
        want_file_name=None,
        pytest_options=["--doctest-modules", "-v"],
        junit_family=verify.JUNIT_FAMILY,
    )
    assert simulator_status.runner_status.exit_code == 0
    assert simulator_status.pytest_exit_code == 0

    # Look at the returned JUnit XML to confirm that no test
    # failed or errored.
    # Note that the parsed XML values are all strings.
    suite, fails = phmdoctest.tool.extract_testsuite(
        simulator_status.junit_xml)
    assert suite.attrib["tests"] == "4"  # Count checked by running with --report.
    assert suite.attrib["errors"] == "0"
    assert suite.attrib["failures"] == "0"
    assert len(fails) == 0


def test_pytest_really_fails():
    """Make sure pytest fails due to incorrect expected output in the .md.

    Generate a test file that will assert.
    """
    simulator_status = verify.one_example(
        "phmdoctest tests/unexpected_output.md --outfile discarded.py",
        want_file_name=None,
        pytest_options=["--doctest-modules", "-v"],
        junit_family=verify.JUNIT_FAMILY,
    )
    assert simulator_status.pytest_exit_code == 1

    # Look at the returned JUnit XML to see that the test failed at the
    # point and for the reason we expected.
    # Note that the parsed XML values are all strings.
    suite, fails = phmdoctest.tool.extract_testsuite(
        simulator_status.junit_xml)
    assert suite.attrib["tests"] == "1"
    assert suite.attrib["errors"] == "0"
    assert suite.attrib["failures"] == "1"
    assert fails[0].attrib["name"] == "test_code_4_output_17"


def test_setup_doctest():
    """Make sure --setup-doctest --outfile is correct."""
    command = labeled.contents(label="setup-doctest-outfile")
    _ = verify.one_example(
        command,
        want_file_name="doc/test_setup_doctest.py",
        pytest_options=None,
    )