def test_display_failures(swagger_20, capsys, execution_context, results_set, verbosity, response):
    """Failures section shows the title, the failing operation subsection, and a reproduction snippet."""
    execution_context.verbosity = verbosity
    # Given two test results - success and failure
    api_operation = models.APIOperation("/api/failure", "GET", {}, base_url="http://127.0.0.1:8080", schema=swagger_20)
    failed_result = models.TestResult(api_operation.method, api_operation.full_path, DataGenerationMethod.default())
    failed_result.add_failure("test", models.Case(api_operation), response, 0, "Message")
    execution_context.results.append(SerializedTestResult.from_test_result(failed_result))
    results_set.append(failed_result)
    finished = Finished.from_results(results_set, 1.0)
    # When the failures are displayed
    default.display_failures(execution_context, finished)
    captured = capsys.readouterr().out.strip()
    # Then section title is displayed
    assert " FAILURES " in captured
    # And operation with a failure is displayed as a subsection
    assert " GET: /v1/api/failure " in captured
    assert "Message" in captured
    # And a ready-to-run reproduction snippet is included
    assert "Run this Python code to reproduce this failure: " in captured
    assert f"requests.get('http://127.0.0.1:8080/api/failure', headers={{'User-Agent': '{USER_AGENT}'}})" in captured
def test_display_errors(swagger_20, capsys, results_set, execution_context, show_errors_tracebacks):
    """Errors section shows the title, the erroring operation, the exception, the example, and the seed hint."""
    # Given two test results - success and error
    api_operation = models.APIOperation("/api/error", "GET", {}, swagger_20)
    errored_result = models.TestResult(
        api_operation.method, api_operation.full_path, DataGenerationMethod.default(), seed=123
    )
    errored_result.add_error(ConnectionError("Connection refused!"), models.Case(api_operation, query={"a": 1}))
    results_set.append(errored_result)
    execution_context.results.append(SerializedTestResult.from_test_result(errored_result))
    finished = Finished.from_results(results_set, 1.0)
    # When the errors are displayed
    execution_context.show_errors_tracebacks = show_errors_tracebacks
    default.display_errors(execution_context, finished)
    captured = capsys.readouterr().out.strip()
    # Then section title is displayed
    assert " ERRORS " in captured
    has_help_message = (
        "Add this option to your command line parameters to see full tracebacks: --show-errors-tracebacks" in captured
    )
    # And help message is displayed only if tracebacks are not shown
    assert has_help_message is not show_errors_tracebacks
    # And operation with an error is displayed as a subsection
    assert " GET: /v1/api/error " in captured
    # And the error itself is displayed
    assert "ConnectionError: Connection refused!" in captured
    # And the example is displayed
    assert "Query : {'a': 1}" in captured
    assert "Or add this option to your command line parameters: --hypothesis-seed=123" in captured
def test_display_statistic(capsys, swagger_20, execution_context, operation, response):
    """Per-check statistics rows are rendered with correct pass counts and colored verdicts."""
    # Given multiple successful & failed checks in a single test
    passed_check = models.Check("not_a_server_error", models.Status.success, response, 0, models.Case(operation))
    failed_check = models.Check("not_a_server_error", models.Status.failure, response, 0, models.Case(operation))
    single_test_statistic = models.TestResult(
        operation.method,
        operation.full_path,
        DataGenerationMethod.default(),
        [
            passed_check,
            passed_check,
            passed_check,
            failed_check,
            failed_check,
            models.Check("different_check", models.Status.success, response, 0, models.Case(operation)),
        ],
    )
    results = models.TestResultSet([single_test_statistic])
    finished = Finished.from_results(results, running_time=1.0)
    # When test results are displayed
    default.display_statistic(execution_context, finished)
    non_empty_lines = [line for line in capsys.readouterr().out.split("\n") if line]
    failed_label = strip_style_win32(click.style("FAILED", bold=True, fg="red"))
    passed_label = strip_style_win32(click.style("PASSED", bold=True, fg="green"))
    # Then all check results should be properly displayed with relevant colors
    assert non_empty_lines[2:4] == [
        f" not_a_server_error 3 / 5 passed {failed_label} ",
        f" different_check 1 / 1 passed {passed_label} ",
    ]
def test_display_single_error(capsys, swagger_20, operation, execution_context, show_errors_tracebacks):
    """A multiline error is rendered in red; the traceback is shown only when requested.

    Fix: the former ``sys.version_info <= (3, 8)`` conditional assigned the exact same
    ``expected`` string in both branches, so the version check was dead code and has
    been removed.
    """
    # Given exception is multiline
    exception = None
    try:
        exec("some invalid code")
    except SyntaxError as exc:
        exception = exc
    result = models.TestResult(operation.method, operation.path, DataGenerationMethod.default())
    result.add_error(exception)
    # When the related test result is displayed
    execution_context.show_errors_tracebacks = show_errors_tracebacks
    default.display_single_error(execution_context, SerializedTestResult.from_test_result(result))
    lines = capsys.readouterr().out.strip().split("\n")
    # Then it should be correctly formatted and displayed in red color
    expected = ' File "<string>", line 1\n some invalid code\n ^\nSyntaxError: invalid syntax\n'
    if show_errors_tracebacks:
        lines = click.unstyle("\n".join(lines)).split("\n")
        assert lines[1] == "Traceback (most recent call last):"
        # There is a path on the next line, it is simpler to not check it since it doesn't give much value
        # But presence of traceback itself is checked
        expected = f' exec("some invalid code")\n{expected}'
        assert "\n".join(lines[3:8]) == expected.strip("\n")
    else:
        assert "\n".join(lines[1:6]) == strip_style_win32(click.style(expected, fg="red")).rstrip("\n")
def test_display_single_failure(capsys, swagger_20, execution_context, endpoint, body):
    """A single failing test renders the endpoint subsection, the body (when set), and omits empty attributes."""
    # Given a single test result with multiple successful & failed checks
    passed_check = models.Check("not_a_server_error", models.Status.success)
    failed_check = models.Check("not_a_server_error", models.Status.failure, models.Case(endpoint, body=body))
    test_result = models.TestResult(
        endpoint,
        DataGenerationMethod.default(),
        [
            passed_check,
            passed_check,
            passed_check,
            failed_check,
            failed_check,
            models.Check("different_check", models.Status.success),
        ],
    )
    # When this failure is displayed
    default.display_failures_for_single_test(execution_context, SerializedTestResult.from_test_result(test_result))
    output = capsys.readouterr().out
    output_lines = output.split("\n")
    # Then the endpoint name is displayed as a subsection
    assert " GET: /v1/success " in output_lines[0]
    # And body should be displayed if it is not NOT_SET
    if body is NOT_SET:
        assert "Body" not in output
    else:
        assert strip_style_win32(click.style(f"Body : {body}", fg="red")) in output_lines
    # And empty parameters are not present in the output
    assert "Path parameters" not in output
    # And not needed attributes are not displayed
    assert "Path" not in output
    assert "Method" not in output
    assert "Base url" not in output
def test_display_failures(swagger_20, capsys, execution_context, results_set, verbosity, response):
    """Failures section shows the title, the failing operation subsection, and a cURL reproduction command."""
    execution_context.verbosity = verbosity
    # Given two test results - success and failure
    api_operation = models.APIOperation("/api/failure", "GET", {}, base_url="http://127.0.0.1:8080", schema=swagger_20)
    failed_result = models.TestResult(
        api_operation.method,
        api_operation.full_path,
        verbose_name=f"{api_operation.method} {api_operation.full_path}",
        data_generation_method=DataGenerationMethod.default(),
    )
    failed_result.add_failure("test", models.Case(api_operation), response, 0, "Message", None)
    execution_context.results.append(SerializedTestResult.from_test_result(failed_result))
    results_set.append(failed_result)
    finished = Finished.from_results(results_set, 1.0)
    # When the failures are displayed
    default.display_failures(execution_context, finished)
    captured = capsys.readouterr().out.strip()
    # Then section title is displayed
    assert " FAILURES " in captured
    # And operation with a failure is displayed as a subsection
    assert " GET /v1/api/failure " in captured
    assert "Message" in captured
    # And a ready-to-run cURL reproduction command is included
    assert "Run this cURL command to reproduce this failure:" in captured
    headers = f"-H 'Content-Length: 0' -H 'Content-Type: application/json' -H 'User-Agent: {USER_AGENT}'"
    assert f"curl -X GET {headers} http://127.0.0.1:8080/api/failure" in captured
def results_set(operation):
    """Build a result set holding one empty `TestResult` for the given operation."""
    empty_result = models.TestResult(
        operation.method,
        operation.full_path,
        data_generation_method=DataGenerationMethod.default(),
        verbose_name=f"{operation.method} {operation.full_path}",
    )
    return models.TestResultSet([empty_result])
def results_set(operation):
    """Build a result set holding one empty `TestResult` for the given operation."""
    empty_result = models.TestResult(
        operation.method,
        operation.full_path,
        data_generation_method=DataGenerationMethod.default(),
    )
    return models.TestResultSet([empty_result])
def results_set(endpoint):
    """Build a result set holding one empty `TestResult` for the given endpoint."""
    empty_result = models.TestResult(endpoint, data_generation_method=DataGenerationMethod.default())
    return models.TestResultSet([empty_result])
def test_deprecated_attribute(swagger_20): operation = APIOperation("/users/{name}", "GET", {}, swagger_20, base_url="http://127.0.0.1/api/v3") case = Case(operation) with pytest.warns(None) as records: assert case.endpoint == case.operation == operation assert str(records[0].message) == ( "Property `endpoint` is deprecated and will be removed in Schemathesis 4.0. Use `operation` instead." ) @pytest.mark.parametrize("method", DataGenerationMethod.all()) @pytest.mark.hypothesis_nested def test_data_generation_method_is_available(method, empty_open_api_3_schema): # When a new case is generated empty_open_api_3_schema["paths"] = { "/data": { "post": { "requestBody": { "required": True, "content": { "text/plain": { "schema": { "type": "string" } } },