def test_temci_init_build_config():
    """The build config generated by `temci init` must be accepted by `temci build`."""
    generated = run_temci("init build_config").file_contents["build_config.yaml"]
    run_temci("build a.yaml", files={"a.yaml": generated})
def test_build_before_exec_abort():
    """A failing build command should abort the subsequent exec run."""
    failing_block = {
        "run_config": {"cmd": "./test", "tags": []},
        "build_config": {"cmd": "exit(1)"},
    }
    run_temci("exec bla.yaml --runs 1",
              files={"bla.yaml": [failing_block]},
              expect_success=False, raise_exc=False)
def test_build_before_exec():
    """A block with a build config should be built before it is executed."""
    block = {
        "run_config": {"cmd": "./test", "tags": []},
        "build_config": {"cmd": "echo 'echo 3' > test; chmod +x test"},
    }
    run_temci("exec bla.yaml --runs 1", files={"bla.yaml": [block]})
def test_included_blocks_single_issue99():
    """Selecting a single block whose build fails should exit non-zero (issue #99)."""
    blocks = [
        {"attributes": {"description": "a"},
         "run_config": {"cmd": "true"}},
        {"attributes": {"description": "b"},
         "run_config": {"cmd": "true"},
         "build_config": {"cmd": "false"}},
    ]
    result = run_temci("exec --in in.yaml --included_blocks b --runs 0",
                       files={"in.yaml": blocks}, expect_success=False)
    assert result.ret_code != 0
def test_codespeed_reporter_failed():
    """The codespeed reporter should skip erroneous runs and report only successful ones."""
    ok_entry = {"attributes": {"description": "XYZ"}, "data": {"p": [1]}}
    err_entry = {
        "attributes": {"description": "ZYX"},
        "data": {},
        "error": {"message": "no", "error_output": "", "output": "",
                  "return_code": 1},
    }
    output = run_temci(
        "report in.yaml",
        settings={"report": {"reporter": "codespeed",
                             "codespeed_misc": {"project": "test"}}},
        files={"in.yaml": [ok_entry, err_entry]},
    ).out
    assert len(json.loads(output)) == 1
def test_multiline_perf_command():
    """The perf_stat runner should cope with commands that contain newlines."""
    result = run_temci("exec bla.yaml", files={
        "bla.yaml": [{"run_config": {"runner": "perf_stat",
                                     "cmd": "\necho 42",
                                     "runs": 1}}]
    })
    assert "task-clock" in result.yaml_contents["run_output.yaml"][0]["data"]
def test_per_block_runs_issue_113():
    """A per-block `runs` setting should be honoured (issue #113)."""
    result = run_temci("exec bla.yaml", files={
        "bla.yaml": [{"run_config": {"cmd": "echo nooo", "runs": 1}}]
    })
    measurements = result.yaml_contents["run_output.yaml"][0]["data"]["stime"]
    assert len(measurements) == 1
def test_support_multiple_inputs():
    """`temci report` should accept several input files and number duplicate descriptions."""
    def entry():
        return {"attributes": {"description": "XYZ"}, "data": {"p": [1]}}
    output = run_temci("report in1.yaml in2.yaml --console_mode auto",
                       files={"in1.yaml": [entry()],
                              "in2.yaml": [entry(), entry()]}).out
    assert any("XYZ [1]" in line and "XYZ [2]" in line
               for line in output.split("\n"))
def test_check_tag_attribute():
    """A `tags` attribute that is not a list should be rejected."""
    result = run_temci("exec bla.yaml --runs 1", files={
        "bla.yaml": [{"run_config": {"cmd": "echo 1"},
                      "attributes": {"tags": "slow"}}]
    }, expect_success=False)
    assert result.ret_code != 0
def test_config_not_ignored():
    """Issue "Config now seems to be ignored completely after type checking #62"."""
    output = run_temci("short exec ls", settings={"run": {"runs": 3}}).out
    assert "3 single benchmarks" in output
def test_html2_with_single():
    """The html2 reporter should produce a report for a single benchmark block."""
    result = run_temci("report --reporter html2 in.yaml", files={
        "in.yaml": [{"attributes": {"description": "XYZ"},
                     "data": {"p": [1]}}]
    })
    assert "report.html" in result.file_contents
def test_console_baseline():
    """`--console_baseline` should run without error when the baseline block exists."""
    # No assertion: the test only checks that reporting with a baseline succeeds.
    blocks = [
        {"attributes": {"description": "XYZ"},
         "data": {"p456": [1], "z111": [2]}},
        {"attributes": {"description": "base"},
         "data": {"p456": [1], "z111": [2]}},
    ]
    run_temci(r"report in.yaml --console_baseline base",
              files={"in.yaml": blocks}).out
def test_properties_regexp():
    """`--properties` should be interpreted as a regular expression filter."""
    output = run_temci(r"report in.yaml --properties 'p.*'", files={
        "in.yaml": [{"attributes": {"description": "XYZ"},
                     "data": {"p456": [1], "z111": [2]}}]
    }).out
    assert "p456" in output
    assert "z111" not in output
def test_build_before_exec_only_build():
    """With `only_build` enabled the built program must not actually be executed."""
    block = {
        "attributes": {"description": "3333"},
        "run_config": {"cmd": "./test", "tags": []},
        "build_config": {"cmd": "echo 'echo 3333' > test; chmod +x test"},
    }
    output = run_temci("exec bla.yaml --runs 1",
                       files={"bla.yaml": [block]},
                       settings={"run": {"only_build": True}},
                       expect_success=False, raise_exc=False).out
    assert "3333" not in output
def test_codespeed_reporter():
    """The codespeed reporter should emit one JSON entry per measured property."""
    entry = {"attributes": {"description": "XYZ"}, "data": {"p": [1]}}
    output = run_temci(
        "report in.yaml",
        settings={"report": {"reporter": "codespeed",
                             "codespeed_misc": {"project": "test"}}},
        files={"in.yaml": [entry]},
    ).out
    parsed = json.loads(output)
    assert len(parsed) == 1
    assert parsed[0]["benchmark"] == "XYZ: p"
def test_build_before_exec_do_not_arbort():
    """With `abort_after_build_error` disabled, later blocks are still built and run."""
    failing = {
        "run_config": {"cmd": "./test", "tags": []},
        "build_config": {"cmd": "exit(1)"},
    }
    working = {
        "attributes": {"description": "3333"},
        "run_config": {"cmd": "./test", "tags": []},
        "build_config": {"cmd": "echo 'echo 3333' > test; chmod +x test"},
    }
    output = run_temci("exec bla.yaml --runs 1",
                       files={"bla.yaml": [failing, working]},
                       settings={"run": {"abort_after_build_error": False}},
                       expect_success=False, raise_exc=False).out
    assert "3333" in output
def test_console_reporter_auto_mode():
    """Auto console mode should group equal descriptions and number the duplicates."""
    def block(suffix):
        return {"attributes": {"description": "XYZ" + suffix},
                "data": {"p": [1]}}
    output = run_temci("report in.yaml --console_mode auto", files={
        "in.yaml": [block(""), block(""), block(""), block("W"), block("X")]
    }).out
    assert "Report for XYZ" in output
    assert any("XYZ [1]" in line and "XYZ [2]" in line
               for line in output.split("\n"))
    assert "XYZX" in output
def test_max_runs_per_block():
    """A per-block `max_runs` below the global `min_runs` should still be honoured."""
    blocks = [
        {"run_config": {"cmd": "ls", "max_runs": 1}},
        {"run_config": {"cmd": "ls .", "max_runs": 3}},
    ]
    output = run_temci("exec bla.yaml",
                       settings={"run": {"max_runs": 4, "min_runs": 2}},
                       files={"bla.yaml": blocks}).out
    assert " 1 single bench" in output
def test_settings_set_config_option_not_to_itself():
    """`temci init settings` must not point the `settings` option at the file itself."""
    generated = run_temci("init settings").yaml_contents["temci.yaml"]
    assert generated["settings"] != "temci.yaml"
def test_format():
    """`temci format` should round a value against its standard deviation."""
    result = run_temci("format 1.001 0.05")
    assert result.out == "1.0(01)"
def test_parse_output_option():
    """`--parse_output` should pick up `key: value` pairs printed by the program."""
    output = run_temci("short exec 'echo foo: 3' --runner time --parse_output").out
    assert "time " in output
    assert "foo " in output
def test_pass_arguments():
    """`--argument` should forward extra arguments to the benchmarked program."""
    result = run_temci("short exec exit --argument 1", expect_success=False)
    assert result.ret_code == ErrorCode.PROGRAM_ERROR.value
def test_discard_blocks_on_error():
    """A failing program with `--discard_all_data_for_block_on_error` yields a program error."""
    result = run_temci("short exec 'exit 1' --discard_all_data_for_block_on_error",
                       expect_success=False)
    assert result.ret_code == ErrorCode.PROGRAM_ERROR.value
def test_runs_option_broken():
    """`--runs` should override both `--min_runs` and `--max_runs`."""
    result = run_temci("short exec 'exit 0' --min_runs 2 --max_runs 2 --runs 3")
    measurements = result.yaml_contents["run_output.yaml"][0]["data"]["stime"]
    assert len(measurements) == 3
def test_envinfo_in_result():
    """The run output should contain environment information."""
    entries = run_temci("short exec ls").yaml_contents["run_output.yaml"]
    assert any("env_info" in entry for entry in entries)
def test_temci_init_run_config():
    """The run config generated by `temci init` must be accepted by `temci exec`."""
    generated = run_temci("init run_config").yaml_contents["run_config.yaml"]
    run_temci("exec a.yaml", files={"a.yaml": generated})
def test_config_default_values():
    """Settings supplied via the config should override the built-in defaults."""
    output = run_temci("short exec ls --log_level debug",
                       settings={"run": {"runs": 11}}).out
    assert "11 single bench" in output
def test_included_blocks():
    """`--included_blocks` should filter the benchmarked blocks by name."""
    output = run_temci("short exec echo ls --included_blocks ls --runs 1").out
    assert "ls" in output
    assert "echo" not in output
def test_successful_run_errors():
    """A successful run must record neither an internal nor a program error."""
    entry = run_temci("short exec true").yaml_contents["run_output.yaml"][0]
    assert "internal_error" not in entry
    assert "error" not in entry
def test_errorneous_run():
    """A failing program should be recorded with its error and return code."""
    entry = run_temci("short exec 'exit 1'",
                      expect_success=False).yaml_contents["run_output.yaml"][0]
    assert "error" in entry
    assert entry["error"]["return_code"] == 1