def validate_against_openapi_schema(content: Dict[str, Any], endpoint: str, method: str, response: str) -> bool:
    """Compare a "content" dict with the defined schema for a specific
    method in an endpoint. Return true if validated and false if skipped.
    """
    # This first set of checks are primarily training wheels that we
    # hope to eliminate over time as we improve our API documentation.

    # No 500 responses have been documented, so skip them
    if response.startswith('5'):
        return False

    # Resolve the endpoint against the documented paths; undocumented
    # endpoints that also fail the regex match are skipped entirely.
    if endpoint not in openapi_spec.spec()['paths']:
        matched_endpoint = match_against_openapi_regex(endpoint)
        # If it doesn't match it hasn't been documented yet.
        if matched_endpoint is None:
            return False
        endpoint = matched_endpoint

    endpoint_method_key = endpoint + ':' + method
    # Excluded endpoint/methods
    if endpoint_method_key in EXCLUDE_UNDOCUMENTED_ENDPOINTS:
        return False
    # Return true for endpoints with only response documentation remaining
    if endpoint_method_key in EXCLUDE_DOCUMENTED_ENDPOINTS:
        return True

    # Check if the response matches its code
    if response.startswith('2'):
        result_field = content.get('result', 'success')
        if result_field.lower() != 'success':
            raise SchemaError(
                "Response is not 200 but is validating against 200 schema")

    # Code is not declared but appears in various 400 responses. If
    # common, it can be added to 400 response schema
    if response.startswith('4'):
        # This return statement should ideally be not here. But since
        # we have not defined 400 responses for various paths this has
        # been added as all 400 have the same schema. When all 400
        # response have been defined this should be removed.
        return True

    # The actual work of validating that the response matches the
    # schema is done via the third-party OAS30Validator.
    validator = OAS30Validator(get_schema(endpoint, method, response))
    validator.validate(content)
    return True
def test_allof_required(self):
    """An ``allOf`` schema must enforce ``required`` declared in one
    subschema against a property declared in a different subschema."""
    properties_subschema = {
        "type": "object",
        "properties": {
            "some_prop": {
                "type": "string"
            }
        }
    }
    required_subschema = {
        "type": "object",
        "required": ["some_prop"]
    }
    schema = {"allOf": [properties_subschema, required_subschema]}
    validator = OAS30Validator(schema, format_checker=oas30_format_checker)
    # An instance missing "some_prop" must be rejected even though the
    # `required` constraint lives in a separate allOf branch.
    with pytest.raises(ValidationError,
                       match="'some_prop' is a required property"):
        validator.validate({"another_prop": "bla"})
def test_oneof_discriminator(self, schema_type):
    """Exercise discriminator-based resolution for composed schemas.

    Runs parametrized over ``schema_type`` (oneOf/anyOf/allOf): first
    checks the generic jsonschema error without a discriminator, then
    adds a ``discriminator`` to the Route schema and checks explicit
    mapping, implicit (schema-name) resolution, useful error messages,
    and unresolvable references.
    """
    # We define a few components schemas
    components = {
        "MountainHiking": {
            "type": "object",
            "properties": {
                "discipline": {
                    "type": "string",
                    # we allow both the explicitly matched mountain_hiking
                    # discipline and the implicitly matched MountainHiking
                    # discipline
                    "enum": ["mountain_hiking", "MountainHiking"]
                },
                "length": {
                    "type": "integer",
                }
            },
            "required": ["discipline", "length"]
        },
        "AlpineClimbing": {
            "type": "object",
            "properties": {
                "discipline": {
                    "type": "string",
                    "enum": ["alpine_climbing"]
                },
                "height": {
                    "type": "integer",
                },
            },
            "required": ["discipline", "height"]
        },
        "Route": {
            # defined later
        }
    }
    components['Route'][schema_type] = [
        {
            "$ref": "#/components/schemas/MountainHiking"
        },
        {
            "$ref": "#/components/schemas/AlpineClimbing"
        },
    ]

    # Add the components in a minimalist schema
    schema = {
        "$ref": "#/components/schemas/Route",
        "components": {
            "schemas": components
        }
    }

    if schema_type != 'allOf':
        # Use plain jsonschema semantics when no discriminator is
        # defined: an instance matching no branch fails with the
        # generic message. This single check covers both oneOf and
        # anyOf; a second, byte-identical `if schema_type == 'anyOf'`
        # block was removed as redundant.
        validator = OAS30Validator(schema,
                                   format_checker=oas30_format_checker)
        with pytest.raises(
                ValidationError,
                match="is not valid under any of the given schemas"):
            validator.validate(
                {"something": "matching_none_of_the_schemas"})
            assert False

    discriminator = {
        "propertyName": "discipline",
        "mapping": {
            "mountain_hiking": "#/components/schemas/MountainHiking",
            "alpine_climbing": "#/components/schemas/AlpineClimbing",
        }
    }
    schema['components']['schemas']['Route'][
        'discriminator'] = discriminator

    # Optional: check we return useful result when the schema is wrong
    validator = OAS30Validator(schema, format_checker=oas30_format_checker)
    with pytest.raises(ValidationError,
                       match="does not contain discriminating property"):
        validator.validate({"something": "missing"})
        assert False

    # Check we get a non-generic, somehow usable, error message when a
    # discriminated schema is failing
    with pytest.raises(ValidationError,
                       match="'bad_string' is not of type integer"):
        validator.validate({
            "discipline": "mountain_hiking",
            "length": "bad_string"
        })
        assert False

    # Check explicit MountainHiking resolution
    validator.validate({"discipline": "mountain_hiking", "length": 10})
    # Check implicit MountainHiking resolution
    validator.validate({"discipline": "MountainHiking", "length": 10})
    # Check non resolvable implicit schema
    with pytest.raises(
            ValidationError,
            match=
            "reference '#/components/schemas/other' could not be resolved"
    ):
        validator.validate({"discipline": "other"})
        assert False
def validate_against_openapi_schema(
    content: Dict[str, Any],
    path: str,
    method: str,
    status_code: str,
    display_brief_error: bool = False,
) -> bool:
    """Compare a "content" dict with the defined schema for a specific
    method in an endpoint. Return true if validated and false if skipped.

    When ``display_brief_error`` is true and validation fails, re-raise
    with the error's schema narrowed to the oneOf variant(s) whose
    example "type" matches the instance, instead of the full schema.
    """
    # This first set of checks are primarily training wheels that we
    # hope to eliminate over time as we improve our API documentation.

    # No 500 responses have been documented, so skip them
    if status_code.startswith("5"):
        return False
    if path not in openapi_spec.openapi()["paths"]:
        endpoint = find_openapi_endpoint(path)
        # If it doesn't match it hasn't been documented yet.
        if endpoint is None:
            return False
    else:
        endpoint = path
    # Excluded endpoint/methods
    if (endpoint, method) in EXCLUDE_UNDOCUMENTED_ENDPOINTS:
        return False
    # Return true for endpoints with only response documentation remaining
    if (endpoint, method) in EXCLUDE_DOCUMENTED_ENDPOINTS:
        return True
    # Check if the response matches its code
    if status_code.startswith("2") and (content.get(
            "result", "success").lower() != "success"):
        raise SchemaError(
            "Response is not 200 but is validating against 200 schema")
    # Code is not declared but appears in various 400 responses. If
    # common, it can be added to 400 response schema
    if status_code.startswith("4"):
        # This return statement should ideally be not here. But since
        # we have not defined 400 responses for various paths this has
        # been added as all 400 have the same schema. When all 400
        # response have been defined this should be removed.
        return True
    # The actual work of validating that the response matches the
    # schema is done via the third-party OAS30Validator.
    schema = get_schema(endpoint, method, status_code)
    if endpoint == "/events" and method == "get":
        # This a temporary function for checking only documented events
        # as all events haven't been documented yet.
        # TODO: Remove this after all events have been documented.
        fix_events(content)
    validator = OAS30Validator(schema)
    try:
        validator.validate(content)
    except JsonSchemaValidationError as error:
        if not display_brief_error:
            # Bare raise preserves the original traceback.
            raise
        # display_brief_error is designed to avoid printing 1000 lines
        # of output when the schema to validate is extremely large
        # (E.g. the several dozen format variants for individual
        # events returned by GET /events) and instead just display the
        # specific variant we expect to match the response.
        #
        # NOTE(review): this assumes the failed schema is a oneOf of
        # event variants each carrying an {"example": {"type": ...}};
        # keyed off the instance's "type" field — confirm for other
        # callers that pass display_brief_error=True.
        instance_type = error.instance["type"]
        brief_error_validator_value = [
            validator_value for validator_value in error.validator_value
            if validator_value["example"]["type"] == instance_type
        ]
        brief_error_display_schema = {
            "nullable": False,
            "oneOf": [
                variant for variant in error.schema["oneOf"]
                if variant["example"]["type"] == instance_type
            ],
        }
        # Field list from https://python-jsonschema.readthedocs.io/en/stable/errors/
        raise JsonSchemaValidationError(
            message=error.message,
            validator=error.validator,
            path=error.path,
            instance=error.instance,
            schema_path=error.schema_path,
            schema=brief_error_display_schema,
            validator_value=brief_error_validator_value,
            cause=error.cause,
        )
    return True