def test_spec_returns_valid_json(notify_api, sample_notification):
    """GET /spec returns a document that flex accepts as a valid Swagger schema."""
    with notify_api.test_request_context(), notify_api.test_client() as client:
        auth_header = create_authorization_header(service_id=sample_notification.service_id)
        response = client.get('/spec', headers=[auth_header])
        # flex.load raises if the parsed body is not a valid Swagger schema.
        body = json.loads(response.get_data(as_text=True))
        flex.load(body)
def parse(self):
    """Parse the Swagger file and return the InternalNode hierarchy built from it."""
    schema = flex.load(self.file)
    paths = schema['paths']
    root = InternalNode("", None)

    def request_validator(req):
        """Callback used to validate a request."""
        flex.core.validate_api_request(schema, req)

    def response_validator(reply, request_method='get', raw_request=None):
        """Callback used to validate a server response."""
        flex.core.validate_api_response(schema, reply,
                                        request_method=request_method,
                                        raw_request=raw_request)

    # TODO: resolve '$ref' in path
    for path, resource in paths.items():
        node = root
        for segment in path.split("/"):
            if not segment:
                continue
            # InternalNode identifies variables with '${.*}', Swagger with '{.*}'.
            segment = SWAGGER_VARNAME_RE.sub("${\\1}", segment)
            node = node.add_child(segment)
        # Attach every HTTP verb declared on this path to the final node.
        for verb in resource:
            node.add_method(verb,
                            request_validator=request_validator,
                            response_validator=response_validator)
    return root
def validate_schema():
    """Validate the project's API schema; returns True if it passes."""
    spec_path = join(conf.PROJECT_DIR, 'schema', 'api.yaml')
    validate(flex.load(spec_path))
    return True
from flask import Blueprint, Response, request import json # from metamodels import (kami, base_kami) from base.webserver_utils import (apply_on_node_with_parent, get_node_id, apply_on_node) import server.kami.kappa as kappa #import server.kami.anatomizer_tools as anatomizer_tools from server.kami.algebra import concat_test, create_compositions import regraph.tree as tree import flex import os from flex.loading.schema.paths.path_item.operation.responses.single.schema\ import schema_validator YAML = os.path.join(os.path.dirname(__file__) + "/../", 'iRegraph_api.yaml') json_schema_context = flex.load(YAML) kami_blueprint = Blueprint("kami_blueprint", __name__) @kami_blueprint.route("/graph/get_kappa/", methods=["POST"]) @kami_blueprint.route("/graph/get_kappa/<path:path_to_graph>", methods=["POST"]) def get_kappa(path_to_graph=""): """ generates a kappa model from a set of nuggets""" def get_kappa_aux(graph_id, parent_id): hie = kami_blueprint.hie() if "names" not in request.json.keys(): nuggets_ids = [] else: nuggets_names = request.json["names"] nuggets_ids = [
def load_swagger_spec(self):
    """Populate self.specs with each v1beta0 Swagger spec present on disk.

    Looks for user/account/global spec files under self.spec_path;
    files that do not exist are skipped silently.
    """
    for name in ('user', 'account', 'global'):
        spec_file = self.spec_path / ('%s.v1beta0.yml' % name)
        if not spec_file.is_file():
            continue
        self.specs[name] = flex.load(spec_file.as_posix())
def main(): set_utf8_as_default_encoding() args = parse_args() SCHEMAS, URIS, URI_PATTERNS = load_schema() for URI in URIS: URIS[URI].pop('uri_re_pattern') URIS = resolve_schemas(URIS, SCHEMAS) validate_schemas(URIS) data = { 'swagger': '2.0', 'info': { 'title': 'Metadata_v4', 'description': 'Simple API for demonstrating json validation', 'version': '4.0' }, 'host': 'sync.mod.net', 'schemes': ['https'], 'basePath': API_URI_PATH, 'consumes': ['application/json'], 'produces': ['application/json'] } data['paths'] = URIS for path in data['paths']: path_info = data['paths'][path] if isinstance(path_info, dict): path_info.pop('resource_name', '') path_info.pop('path', '') methods = path_info.pop('methods', []) for method in methods: methods[method]['parameters'] = [] if methods[method].get('info'): info = methods[method].pop('info', {}) methods[method]['description'] = info.get( 'description', '') methods[method]['operationId'] = info.get( 'operationId', '') methods[method]['tags'] = info.get('tags', []) if methods[method].get('request'): request = methods[method].pop('request', {}) if isinstance(request, dict): if not args.without_params: for request_params_group in REQUEST_PARAMS_GROUPS: if request.get(request_params_group['name'], {}): request_params_group_scheme = request.get( request_params_group['name']) request_params_group_list = request_params_group_scheme.get( 'oneOf' ) or request_params_group_scheme.get( 'allOf') or [ request_params_group_scheme ] for request_params in request_params_group_list: if request_params.get( 'properties', {}): params = request_params.get( 'properties', {}) for param in params: key = params[param] key['name'] = param key['type'] = key.get( 'type') or 'string' key['in'] = request_params_group[ 'in'] key['required'] = key.get( 'required', False) or bool( key['name'] in request_params.get( 'required', [])) methods[method][ 'parameters'].append(key) if not args.without_params: if request.get('body', {}): request_body = { 'name': 
'request_body', 'in': 'body', 'required': True, 'schema': request['body'].get('scheme') } if request_body['schema']: methods[method]['parameters'].append( request_body) if methods[method].get('response'): response = methods[method].pop('response', {}) responses = { '200': { 'description': 'Good response', 'schema': {}, 'headers': {} }, 'default': { 'description': 'Bad response', 'headers': {} } } if not args.without_headers: if response.get('headers_params_scheme', {}): headers_params_scheme = response[ 'headers_params_scheme'] headers_params_scheme_list = headers_params_scheme.get( 'oneOf') or headers_params_scheme.get( 'allOf') or [headers_params_scheme] for headers_params_scheme in headers_params_scheme_list: if headers_params_scheme.get( 'description', {}): if headers_params_scheme[ 'description'] == STANDART_HEADERS: # TODO: bad hardcoding good_headers = headers_params_scheme.get( 'properties') good_headers = { item[0]: item[1] for item in good_headers.items() if item[0] not in HEADERS_IGNORE_LIST } responses['200'][ 'headers'] = good_headers if headers_params_scheme[ 'description'] == ERROR_HEADERS: # TODO: bad hardcoding bad_headers = headers_params_scheme.get( 'properties') responses['default'][ 'headers'] = bad_headers response_schema = response.get('body', {}).get( 'scheme', {}) if not args.without_body else {} responses['200']['schema'] = response_schema methods[method]['responses'] = responses path_info[method] = methods[method] data['paths'] = { item[0]: item[1] for item in data['paths'].items() if item[0] not in API_URI_PATH_EXCLUDE_LIST } data['paths'] = { item[0].replace(API_URI_PATH, ''): item[1] for item in data['paths'].items() } data = filter_dict_keys(data, lambda value: True if value not in ( None, unicode('null')) else False) # TODO: fixpath_info it in metatada data = old_filter_dict_keys( data, lambda key, value: False if key == 'format' and value not in SWAGGER_ALLOW_FORMATS else True) # TODO: fixpath_info it in metatada data = 
old_filter_dict_keys( data, lambda key, value: value) # TODO: fipath_info it in metatada # data = old_filter_dict_keys(data, lambda key, value: False if key=='type' and value not in SWAGGER_ALLOW_TYPE else True) #TODO: fixpath_info it in metatada data = shift_dict( data, dict_func=lambda obj: dict( map( lambda item: (item[0], item[1][0]) if bool( isinstance(item[1], (tuple, list)) and item[0] == 'type' and len(item[1]) == 1) else item, obj.items())) if obj.get('type') else obj ) # TODO: change array to string in 'type' if one type data = shift_dict(data, dict_func=merge_additional_properties_to_obj) data = shift_dict(data, dict_func=lambda obj: dict(obj.items() + [ ('type', 'string'), ]) if obj.get('enum') and not obj.get('type') else obj) if not args.with_oneOf: data = shift_dict(data, dict_func=lambda obj: obj.get('oneOf')[0] if isinstance(obj.get('oneOf') (list, tuple)) else obj) if args.format == 'yaml': file_yaml_name = os.path.join(args.path, '%s.yaml' % args.name) try: import yaml with open(file_yaml_name, 'w+') as yaml_shemas_file: yaml.safe_dump(data, yaml_shemas_file) except ImportError as err: sys.stdout.write(str(err)) log.warning(err) if args.format == 'json': file_json_name = os.path.join(args.path, '%s.json' % args.name) with open(file_json_name, 'w+') as json_shemas_file: json.dump(data, json_shemas_file, indent=4) if args.validate: try: import flex if args.format == 'yaml': schema = flex.load(file_yaml_name) elif args.format == 'json': schema = flex.load(file_json_name) print schema except ImportError as err: sys.stdout.write(str(err)) log.warning(err) try: import SwaggerParser if args.format == 'yaml': parser = SwaggerParser(swagger_path=file_yaml_name) elif args.format == 'json': parser = SwaggerParser(swagger_path=file_json_name) print parser except ImportError as err: sys.stdout.write(str(err)) log.warning(err)
def main(): set_utf8_as_default_encoding() args = parse_args() SCHEMAS, URIS, URI_PATTERNS = load_schema() for URI in URIS: URIS[URI].pop('uri_re_pattern') URIS = resolve_schemas(URIS, SCHEMAS) validate_schemas(URIS) data = { 'swagger': '2.0', 'info': { 'title': 'Metadata_v4', 'description': 'Simple API for demonstrating json validation', 'version': '4.0' }, 'host': 'sync.mod.net', 'schemes': [ 'https' ], 'basePath': API_URI_PATH, 'consumes': [ 'application/json' ], 'produces': [ 'application/json' ] } data['paths'] = URIS for path in data['paths']: path_info = data['paths'][path] if isinstance(path_info, dict): path_info.pop('resource_name', '') path_info.pop('path', '') methods = path_info.pop('methods', []) for method in methods: methods[method]['parameters'] = [] if methods[method].get('info'): info = methods[method].pop('info', {}) methods[method]['description'] = info.get('description', '') methods[method]['operationId'] = info.get('operationId', '') methods[method]['tags'] = info.get('tags', []) if methods[method].get('request'): request = methods[method].pop('request', {}) if isinstance(request, dict): if not args.without_params: for request_params_group in REQUEST_PARAMS_GROUPS: if request.get(request_params_group['name'], {}): request_params_group_scheme = request.get(request_params_group['name']) request_params_group_list = request_params_group_scheme.get( 'oneOf') or request_params_group_scheme.get('allOf') or [ request_params_group_scheme] for request_params in request_params_group_list: if request_params.get('properties', {}): params = request_params.get('properties', {}) for param in params: key = params[param] key['name'] = param key['type'] = key.get('type') or 'string' key['in'] = request_params_group['in'] key['required'] = key.get('required', False) or bool( key['name'] in request_params.get('required', [])) methods[method]['parameters'].append(key) if not args.without_params: if request.get('body', {}): request_body = { 'name': 'request_body', 
'in': 'body', 'required': True, 'schema': request['body'].get('scheme') } if request_body['schema']: methods[method]['parameters'].append(request_body) if methods[method].get('response'): response = methods[method].pop('response', {}) responses = { '200': { 'description': 'Good response', 'schema': {}, 'headers': {} }, 'default': { 'description': 'Bad response', 'headers': {} } } if not args.without_headers: if response.get('headers_params_scheme', {}): headers_params_scheme = response['headers_params_scheme'] headers_params_scheme_list = headers_params_scheme.get( 'oneOf') or headers_params_scheme.get('allOf') or [headers_params_scheme] for headers_params_scheme in headers_params_scheme_list: if headers_params_scheme.get('description', {}): if headers_params_scheme['description'] == STANDART_HEADERS: # TODO: bad hardcoding good_headers = headers_params_scheme.get('properties') good_headers = {item[0]: item[1] for item in good_headers.items() if item[0] not in HEADERS_IGNORE_LIST} responses['200']['headers'] = good_headers if headers_params_scheme['description'] == ERROR_HEADERS: # TODO: bad hardcoding bad_headers = headers_params_scheme.get('properties') responses['default']['headers'] = bad_headers response_schema = response.get('body', {}).get('scheme', {}) if not args.without_body else {} responses['200']['schema'] = response_schema methods[method]['responses'] = responses path_info[method] = methods[method] data['paths'] = {item[0]: item[1] for item in data['paths'].items() if item[0] not in API_URI_PATH_EXCLUDE_LIST} data['paths'] = {item[0].replace(API_URI_PATH, ''): item[1] for item in data['paths'].items()} data = filter_dict_keys( data, lambda value: True if value not in (None, unicode('null')) else False ) # TODO: fixpath_info it in metatada data = old_filter_dict_keys(data, lambda key, value: False if key == 'format' and value not in SWAGGER_ALLOW_FORMATS else True ) # TODO: fixpath_info it in metatada data = old_filter_dict_keys(data, lambda key, 
value: value) # TODO: fipath_info it in metatada # data = old_filter_dict_keys(data, lambda key, value: False if key=='type' and value not in SWAGGER_ALLOW_TYPE else True) #TODO: fixpath_info it in metatada data = shift_dict( data, dict_func=lambda obj: dict(map( lambda item: (item[0], item[1][0]) if bool(isinstance(item[1], (tuple, list)) and item[0] == 'type' and len(item[1]) == 1) else item, obj.items())) if obj.get('type') else obj ) # TODO: change array to string in 'type' if one type data = shift_dict(data, dict_func=merge_additional_properties_to_obj) data = shift_dict( data, dict_func=lambda obj: dict(obj.items() + [('type', 'string'), ]) if obj.get('enum') and not obj.get('type') else obj ) if not args.with_oneOf: data = shift_dict( data, dict_func=lambda obj: obj.get('oneOf')[0] if isinstance(obj.get('oneOf')(list, tuple)) else obj ) if args.format == 'yaml': file_yaml_name = os.path.join(args.path, '%s.yaml' % args.name) try: import yaml with open(file_yaml_name, 'w+') as yaml_shemas_file: yaml.safe_dump(data, yaml_shemas_file) except ImportError as err: sys.stdout.write(str(err)) log.warning(err) if args.format == 'json': file_json_name = os.path.join(args.path, '%s.json' % args.name) with open(file_json_name, 'w+') as json_shemas_file: json.dump(data, json_shemas_file, indent=4) if args.validate: try: import flex if args.format == 'yaml': schema = flex.load(file_yaml_name) elif args.format == 'json': schema = flex.load(file_json_name) print schema except ImportError as err: sys.stdout.write(str(err)) log.warning(err) try: import SwaggerParser if args.format == 'yaml': parser = SwaggerParser(swagger_path=file_yaml_name) elif args.format == 'json': parser = SwaggerParser(swagger_path=file_json_name) print parser except ImportError as err: sys.stdout.write(str(err)) log.warning(err)
# Module-level wiring: load config, set up Jinja2 rendering, and load the
# Swagger schema that drives the generation below.
config = configs.load_config(__file__)

# Absolute directory of this file; templates live in its 'templates' subdir.
PATH = os.path.dirname(os.path.abspath(__file__))

TEMPLATE_ENVIRONMENT = Environment(
    autoescape=False,
    loader=FileSystemLoader(os.path.join(PATH, 'templates')),
    # lstrip_blocks=True,
    # keep_trailing_newline=True,
    trim_blocks=True)


def render_template(template_filename, context):
    """Render the named Jinja2 template with *context* and return the result."""
    return TEMPLATE_ENVIRONMENT.get_template(template_filename).render(context)


# Parse (and implicitly validate) the v65 facade Swagger specification.
schema = flex.load(configs.config.get("facade.specification_url.v65"))
# schema = flex.load(Settings.url_specification_admin_api)

# Paths (with status codes) to generate for. All entries are currently
# commented out; NOTE(review): the closing ']' of this list lies beyond
# this chunk of the file.
generate_for_paths = [
    # {'path': '/acc/facebook', 'codes': ['200', '201', '202', '204']},
    # {'path': '/data', 'codes': ['200', '201', '202', '204']},
    # {'path': '/data/want', 'codes': ['200', '201', '202', '204']},
    # {'path': '/data/want/import', 'codes': ['200', '201', '202', '204']},
    # {'path': '/data/want_many', 'codes': ['200', '201', '202', '204']},
    # {'path': '/data/watch', 'codes': ['200', '201', '202', '204']},
    # {'path': '/data/watch/import', 'codes': ['200', '201', '202', '204']},
    # {'path': '/discover/suggestions', 'codes': ['200', '201', '202', '204']},
    # {'path': '/discussions/{discussion_id}', 'codes': ['200', '201', '202', '204']},
    # {'path': '/discussions/{discussion_id}/comments', 'codes': ['200', '201', '202', '204']},
    # {'path': '/discussions/{discussion_id}/likes', 'codes': ['200', '201', '202', '204']},
    # {'path': '/discussions/{discussion_id}/mention_suggests', 'codes': ['200', '201', '202', '204']},
def test_hello_schema(url):
    """The test API's swagger.json loads as a valid schema and /hello conforms to it."""
    testAPIBasePath = "{}/test/api".format(url)
    schema = flex.load(testAPIBasePath + '/swagger.json')
    # FIX: compare to None with `is`, not `==` (PEP 8 / flake8 E711).
    assert hello_schema(url, schema) is None