def load_specification(self, specification_path: str, spec_format: str = None):
    """
    Load the swagger specification using the swagger_parser.

    Automatically calls `_init_class_resources` after parsing.

    :param specification_path: The path where the swagger specification is located.
    :param spec_format: The file format of the specification. When falsy
        (now defaults to None instead of being required), the format is
        inferred from the file extension.
    :raises RuntimeError: If the format is not given and cannot be inferred.
    """
    # If the swagger spec format is not specified explicitly, we try to
    # derive it from the specification path
    if not spec_format:
        filename = os.path.basename(specification_path)
        # rsplit keeps the bare extension without a leading dot.
        extension = filename.rsplit(".", 1)[-1]
        if extension in YAML_EXTENSIONS:
            spec_format = SPEC_YAML
        elif extension in JSON_EXTENSIONS:
            spec_format = SPEC_JSON
        else:
            raise RuntimeError("Could not infer specification format. Use "
                               "--spec-format to specify it explicitly.")

    click.secho(f"Using spec format '{spec_format}'", fg="green")

    if spec_format == SPEC_YAML:
        # SwaggerParser takes an open file object for YAML input.
        with open(specification_path, "r") as f:
            self.parser = SwaggerParser(swagger_yaml=f)
    else:
        # For JSON (and anything else) the parser loads the file itself.
        self.parser = SwaggerParser(swagger_path=specification_path)

    self._init_class_resources()
def load_specification(self, specification_path, spec_format=None):
    """Load a swagger spec, inferring its format from the extension if needed,
    then build the (path, verb) -> operation map and the class definitions."""
    if not spec_format:
        # Derive the format from the file extension when not given explicitly.
        ext = os.path.basename(specification_path).rsplit(".", 1)[-1]
        if ext in YAML_EXTENSIONS:
            spec_format = SPEC_YAML
        elif ext in JSON_EXTENSIONS:
            spec_format = SPEC_JSON
        else:
            raise RuntimeError("Could not infer specification format. Use "
                               "--spec-format to specify it explicitly.")

    click.secho("Using spec format '{}'".format(spec_format), fg="green")

    if spec_format == SPEC_YAML:
        with open(specification_path, "r") as f:
            self.parser = SwaggerParser(swagger_yaml=f)
    else:
        self.parser = SwaggerParser(swagger_path=specification_path)

    # Build (path, http_verb) => operation mapping
    mapping = {}
    for operation, (path, http_verb, tag) in self.parser.operation.items():
        mapping[(path, http_verb)] = operation
    self.PATH_VERB_OPERATION_MAP = mapping

    self._make_class_definitions()
def load_spec(specification_path, spec_format=None):
    """
    Parse a swagger specification file and extract its API definitions.

    :param specification_path: Path of the swagger specification file.
    :param spec_format: Explicit spec format; inferred from the file
        extension when falsy.
    :raises RuntimeError: If the format can neither be given nor inferred,
        or an unknown format value is passed.
    """
    if not spec_format:
        # NOTE(review): os.path.splitext keeps the leading dot (".yaml") —
        # assumes YAML_EXTENSIONS/JSON_EXTENSIONS contain dotted extensions;
        # confirm against their definitions.
        filename, file_ext = os.path.splitext(specification_path)
        if file_ext in YAML_EXTENSIONS:
            spec_format = SPEC_YAML
        elif file_ext in JSON_EXTENSIONS:
            spec_format = SPEC_JSON
        else:
            raise RuntimeError(
                "Could not infer specification format from extension. Use "
                "--spec-format to specify it explicitly.")

    click.secho("Using spec format '{}'".format(spec_format), fg="green")

    if spec_format == SPEC_YAML:
        with open(specification_path, "r") as f:
            parser = SwaggerParser(swagger_yaml=f)
    elif spec_format == SPEC_JSON:
        parser = SwaggerParser(swagger_path=specification_path)
    else:
        raise RuntimeError("Invalid spec_format {}".format(spec_format))

    # Build (path, http_verb) => operation mapping
    api_definitions = extract_Definitions(parser)
    # Dead code removed: the original returned here and then contained an
    # unreachable loop calling generate_ViewModel over parser.paths.
    return
def from_config(cls, config):
    # NOTE(review): this looks like a @classmethod alternate constructor, but
    # it stores the parsed specs on — and returns — the *class* object itself
    # rather than an instance. All users of the class share this state;
    # confirm that is intentional.
    parsed_new = SwaggerParser(swagger_path=config.new_spec)
    parsed_old = SwaggerParser(swagger_path=config.old_spec)
    # Stash both parsed specs on the class for generate() to consume.
    cls.old_parsed = parsed_old
    cls.new_parsed = parsed_new
    cls.generate()
    return cls
def generate_swagger_json(self): """Generate a swagger from all the apis swagger.""" # Base swagger base_swagger = { 'swagger': '2.0', 'info': self.yaml_file.get('info'), 'basePath': self.yaml_file.get('basePath'), 'definitions': {}, 'paths': {} } # Merge aggregates self.merge_aggregates(base_swagger) base_swagger = self.exclude_paths(base_swagger) # Change operation id spec = {} uri = {} path_list = {} action_list = {} current_module = sys.modules[__name__] for path, path_spec in base_swagger['paths'].items(): for action, action_spec in path_spec.items(): # Generate function name and get spec and api url for the path func_name = uuid() path_list[func_name] = path action_list[func_name] = action spec[func_name], uri[func_name] = self.get_spec_from_uri(path, action) # Export generated function to a module level function setattr(current_module, func_name, self.generate_operation_id_function(spec, uri, path_list, action_list, func_name)) # Set operationId action_spec['operationId'] = 'swagger_aggregator.{0}'.format(func_name) self.swagger_parser = SwaggerParser(swagger_dict=deepcopy(base_swagger)) # Remove exclude_fields from swagger for definition_name, definition_spec in base_swagger['definitions'].items(): if definition_name in self.yaml_file.get('exclude_fields', {}): for key in self.yaml_file['exclude_fields'][definition_name]: if key in definition_spec['required']: definition_spec['required'].remove(key) if key in definition_spec['properties']: del definition_spec['properties'][key] # Write swagger.yaml with open(os.path.join(os.path.dirname(os.path.realpath(self.config_file)), 'swagger.yaml'), 'w') as f: f.write(yaml.dump(base_swagger, default_flow_style=False))
def setUp(self):
    """
    For swagger-parser, we need to convert the yaml file to json.
    Create a temp file to store the converted json, and set up the parser.
    """
    # delete=False so the file survives close(); tearDown is expected to
    # remove it.
    self.fileTemp = tempfile.NamedTemporaryFile(delete=False)
    with open('../cert_issuer_identity/swagger/swagger.yaml', 'r') as f:
        # safe_load: the spec is plain data; yaml.load without an explicit
        # Loader is unsafe and deprecated in modern PyYAML.
        doc = yaml.safe_load(f)
    with open(self.fileTemp.name, 'w') as fp:
        json.dump(doc, fp, indent=4)
    self.fileTemp.close()
    self.parser = SwaggerParser(swagger_path=self.fileTemp.name)  # Init with file
class Swaggering(object):
    # CLI-style wrapper around SwaggerParser: reads a spec from the --input/-i
    # attribute and writes results to --output/-o, or stdout when no output
    # file is given.

    @args.attribute(short='i')
    def input(self):
        return

    @args.attribute(short='o')
    def output(self):
        return

    def __init__(self):
        self.parser = SwaggerParser(swagger_path=self.input())
        if self.output():
            self._output = open(self.output(), 'w')
        else:
            self._output = sys.stdout

    def __del__(self):
        # Only close the stream when we opened a file ourselves — never close
        # sys.stdout.
        if self.output():
            self._output.close()

    @args.operation
    def paths(self):
        # Print one spec path per line to the selected output stream.
        for path in self.parser.paths:
            self._output.write('%s\n' % path)
        return

    @args.operation
    def spec(self, path):
        '''
        show the path spec
        :param path: the path
        '''
        return self.parser.get_path_spec(path)
def init_app(self, app, swagger_file=None):
    """Register the swagger spec on a Flask app, stubbing any spec route the
    app does not already implement with its swagger example response.

    :param app: The Flask application whose url_map is inspected/extended.
    :param swagger_file: Path to the swagger YAML file (required).
    :raises Exception: If no file is given or the file is empty.
    """
    if not swagger_file:
        raise Exception('Swagger file not specified')
    with open(swagger_file) as f:
        swagger_yaml = f.read()
    if not swagger_yaml:
        raise Exception('No swagger found')
    self.parser = SwaggerParser(swagger_yaml=swagger_yaml)
    for path, path_config in self.parser.paths.items():
        for method, method_config in path_config.items():
            found = False
            # print(app.url_map)
            # A spec route counts as implemented only when an existing rule
            # matches both the exact path string and the HTTP method.
            for rule in app.url_map.iter_rules():
                if rule.rule == path and method.upper() in rule.methods:
                    found = True
                    # print(dir(rule.map.iter_rules))
            if not found:
                print(
                    "Warning: spec route not implemented; using example from Swagger: "
                    + method.upper() + " " + path)
                # Endpoint name path+method keeps stub endpoints unique.
                app.add_url_rule(path, path + method,
                                 self.resolve_example(method_config),
                                 methods=[method])
    app.before_request(self.before_request)
def test_swagger():
    """Fetch the published spec and exercise every GET endpoint it declares."""
    app = get_app()
    # test the APIs
    res = app.get('/__api__')
    # make sure it's compliant
    # safe_load: the spec body is plain data; yaml.load without an explicit
    # Loader is unsafe and deprecated in modern PyYAML.
    parser = SwaggerParser(swagger_dict=yaml.safe_load(res.body))
    spec = parser.specification
    assert spec['info']['version'] == __version__
    assert spec['schemes'] == ['https']
    assert spec['host'] == 'localhost:80'
    # Concrete values substituted into templated paths like /{prod}/{ver}/...
    _values = {
        'prod': 'firefox',
        'channel': 'beta',
        'locale': 'fr',
        'territory': 'fr',
        'dist': 'dist',
        'distver': 'distver',
        'cohort': 'default',
        'ver': '34'
    }
    # now testing that every GET endpoint is present
    for path, items in spec['paths'].items():
        for verb, options in items.items():
            if verb.upper() != 'GET':
                continue
            # 404 is excluded: the formatted URLs are expected to resolve.
            statuses = [
                int(st) for st in options['responses'].keys() if st != '404'
            ]
            app.get(path.format(**_values), status=statuses)
class TestIssuerIdentity(unittest.TestCase):
    """Validate example requests against the issuer-identity swagger spec."""

    def setUp(self):
        """
        For swagger-parser, we need to convert the yaml file to json.
        Create a temp file to store the converted json, and set up the parser.
        """
        # delete=False so the file survives close(); tearDown removes it.
        self.fileTemp = tempfile.NamedTemporaryFile(delete=False)
        with open('../cert_issuer_identity/swagger/swagger.yaml', 'r') as f:
            # safe_load: the spec is plain data; yaml.load without an explicit
            # Loader is unsafe and deprecated in modern PyYAML.
            doc = yaml.safe_load(f)
        with open(self.fileTemp.name, 'w') as fp:
            json.dump(doc, fp, indent=4)
        self.fileTemp.close()
        self.parser = SwaggerParser(swagger_path=self.fileTemp.name)  # Init with file

    def tearDown(self):
        """Delete the temp file we created in setUp."""
        os.remove(self.fileTemp.name)

    def test_validate_request_invalid(self):
        # Payload deliberately missing/violating the spec for POST /intro/.
        test = {
            "bitcoinAddress": "12jukZaXRLLbNRY9SB8KBQ14D1uK1EVKnA",
            "comments": "string",
            "firstName": "string",
            "lastName": "string"
        }
        # Validate that the given data match a path specification
        result = self.parser.validate_request('/intro/', 'post', body=test)
        self.assertFalse(result)
def __init__(self, swagger_address, custom_base_path=None):
    """Fetch a swagger spec over HTTP and wrap it in a SwaggerParser.

    :param swagger_address: URL returning the swagger spec as JSON.
    :param custom_base_path: Optional prefix prepended to the spec's basePath.
    """
    spec = requests.get(swagger_address).json()
    if custom_base_path is not None:
        # Prefix the advertised basePath with the caller-supplied one.
        spec['basePath'] = custom_base_path + spec['basePath']
    self.swagger = SwaggerParser(swagger_dict=spec)
def __init__(self, path_or_url, verbose=False):
    """Load a swagger spec and prepare an HTTP session for calling its API."""
    content = get_content(path_or_url)
    self._content = content
    self._parser = SwaggerParser(swagger_dict=content)
    spec = self._parser.specification
    self.spec = spec
    self.session = requests.Session()
    self.verbose = verbose
    self.host = spec['host']
    # Default to https when the spec declares no scheme; use the first one.
    self.scheme = spec.get('schemes', ['https'])[0]
    self._operations = self._get_operations()
class APITester():
    # Python 2 code (print statements, dict.iteritems).
    # Replays every 200-response GET/POST operation in a swagger file against
    # a live API and compares the JSON *schemas* (via genson) of the expected
    # example response and the observed response.

    def __init__(self, swaggerFile, apiCaller, basepath=None):
        self._parser = SwaggerParser(swagger_path=swaggerFile)
        self._apiCaller = apiCaller
        # Fall back to the spec's own base_path when none is supplied.
        self._basepath = self._parser.base_path if (basepath is None) else basepath

    def doTest(self):
        # Schemas accumulate across all calls; each add_object() folds another
        # response into the running schema before comparison.
        expectedSchema = genson.Schema()
        observedSchema = genson.Schema()
        for path, pathSpec in self._parser.paths.iteritems():
            callPath = path
            # If a base URL different from the one in the swagger file was
            # given, rewrite the call path to use it instead.
            if self._basepath!=self._parser.base_path:
                callPath = callPath.replace(self._parser.base_path, self._basepath)
            for method, methodDesc in pathSpec.iteritems():
                for status, responseClass in methodDesc['responses'].iteritems():
                    # Only the 200 responses are exercised.
                    if not status == '200':
                        # print ' Ignoring non 200 status:', status, responseClass
                        continue
                    if method=='get':
                        # Is self the best way to get the expected repsonse ?
                        expectedResponse = self._parser.get_response_example(self._parser.paths[path][method]['responses'][status])
                        observedResponse = self._apiCaller.get(callPath)
                    elif method=='post':
                        # POST bodies are filled with parser-generated dummy data.
                        dummyData = self._parser.get_send_request_correct_body(path, 'post')
                        expectedResponse = self._parser.get_response_example(self._parser.paths[path][method]['responses'][status])
                        observedResponse = self._apiCaller.post(callPath, dummyData)
                    else:
                        print 'What do we do for method',method,'?'
                        continue
                    expected = expectedSchema.add_object(expectedResponse).to_dict()
                    observed = observedSchema.add_object(observedResponse).to_dict()
                    if expected!=observed:
                        listDiffs = getDictDiff(expected, observed)
                        raise APIException('API response does not match schema while calling: ' + path, listDiffs)
def generate_swagger_json(self): """Generate a swagger from all the apis swagger.""" # Base swagger base_swagger = { "swagger": "2.0", "info": self.yaml_file.get("info"), "basePath": self.yaml_file.get("basePath"), "definitions": {}, "paths": {}, } # Merge aggregates self.merge_aggregates(base_swagger) base_swagger = self.exclude_paths(base_swagger) # Change operation id spec = {} uri = {} path_list = {} action_list = {} current_module = sys.modules[__name__] for path, path_spec in base_swagger["paths"].items(): for action, action_spec in path_spec.items(): # Generate function name and get spec and api url for the path func_name = uuid() path_list[func_name] = path action_list[func_name] = action spec[func_name], uri[func_name] = self.get_spec_from_uri(path, action) # Export generated function to a module level function setattr( current_module, func_name, self.generate_operation_id_function(spec, uri, path_list, action_list, func_name), ) # Set operationId action_spec["operationId"] = "swagger_aggregator.{0}".format(func_name) self.swagger_parser = SwaggerParser(swagger_dict=deepcopy(base_swagger)) # Remove exclude_fields from swagger for definition_name, definition_spec in base_swagger["definitions"].items(): if definition_name in self.yaml_file.get("exclude_fields", {}): for key in self.yaml_file["exclude_fields"][definition_name]: if key in definition_spec["required"]: definition_spec["required"].remove(key) if key in definition_spec["properties"]: del definition_spec["properties"][key] # Write swagger.yaml with open(os.path.join(os.path.dirname(os.path.realpath(self.config_file)), "swagger.yaml"), "w") as f: f.write(yaml.dump(base_swagger, default_flow_style=False))
def get_all_apis(swagger_file): parser = SwaggerParser(swagger_path=swagger_file) operation = parser.operation opdic = {} for k, v in operation.items(): opdic[v[0]] = k.split('.')[1] specification = parser.specification basePath = specification.get('basePath') # print(json.dumps(specification)) paths = specification.get('paths') apis = [] # print(len(paths)) for path, v in paths.items(): for method, v1 in v.items(): api = {} uri = basePath + path api['uri'] = uri ps = [] # print method ,uri # print v1 api['method'] = method api['desc'] = v1.get('summary') # print(v1.get('summary')) parameters = v1.get('parameters', []) for p in parameters: if (p.get('in') == 'body'): post_ps = [] defi = p['schema']['$ref'].replace('#/definitions/', '') definitions = specification.get('definitions').get(defi) properties = definitions.get("properties") for p_name, p_info in properties.items(): post_np = {} post_np['p_name'] = p_name post_np['p_type'] = p_info.get('type', 'string') post_np['p_des'] = p_info.get('description', '') post_np['p_in'] = 'body' post_ps.append(post_np) ps = ps + post_ps else: np = {} print p np['p_in'] = p["in"] np['p_des'] = p.get("description", "") np['p_type'] = p["type"] np['p_type'] = p.get("type", 'string') np['p_name'] = p["name"] ps.append(np) api['ps'] = ps # print len(api) print method, v1.get('summary') apis.append(api) print(len(apis)) return apis
def pytest_generate_tests(metafunc):
    # Parametrize any test that declares a 'path' fixture with one request
    # string per swagger path: GET query parameters are filled in with their
    # declared defaults. Python 2 code (dict.iteritems).
    if 'path' in metafunc.fixturenames:
        from swagger_parser import SwaggerParser
        parser = SwaggerParser(swagger_path='../api.yaml')
        requests = []
        for path_name, path_spec in parser.paths.iteritems():
            params = []
            # NOTE(review): assumes every path has a 'get' verb with a
            # 'parameters' mapping — a spec without one would KeyError here.
            for param_name, param_spec in path_spec['get']['parameters'].iteritems():
                params.append('{}={}'.format(param_name, param_spec['default']))
            # Append "?a=1&b=2" only when there are parameters.
            requests.append('{}{}'.format(path_name, '' if not params else '?{}'.format('&'.join(params))))
        metafunc.parametrize("path", requests)
def _load_spec(self):
    """Infer the spec format from the file extension and build the parser.

    :raises RuntimeError: When the extension is unsupported, or the derived
        format is somehow invalid.
    """
    filename, file_ext = os.path.splitext(self.specification_path)
    if file_ext in YAML_EXTENSIONS:
        spec_format = SPEC_YAML
    elif file_ext in JSON_EXTENSIONS:
        spec_format = SPEC_JSON
    else:
        supported_extensions = ",".join(YAML_EXTENSIONS) + ',' + ','.join(JSON_EXTENSIONS)
        raise RuntimeError(
            "Could not infer specification format from extension. Supported Extensions: {}"
            .format(supported_extensions))

    click.secho("Using spec format '{}'".format(spec_format), fg="green")

    # YAML input is passed as an open file object; JSON by path.
    if spec_format == SPEC_YAML:
        with open(self.specification_path, "r") as spec_file:
            self.parser = SwaggerParser(swagger_yaml=spec_file)
    elif spec_format == SPEC_JSON:
        self.parser = SwaggerParser(swagger_path=self.specification_path)
    else:
        raise RuntimeError("Invalid spec_format {}".format(spec_format))
def load_specification(self, specification_path, spec_format=None):
    """Load the swagger spec and build the resource definitions from it."""
    if not spec_format:
        # Infer the format from the file extension when not given explicitly.
        extension = os.path.basename(specification_path).rsplit(".", 1)[-1]
        if extension in YAML_EXTENSIONS:
            spec_format = SPEC_YAML
        elif extension in JSON_EXTENSIONS:
            spec_format = SPEC_JSON
        else:
            raise RuntimeError("Could not infer specification format. Use "
                               "--spec-format to specify it explicitly.")

    click.secho("Using spec format '{}'".format(spec_format), fg="green")

    if spec_format == SPEC_YAML:
        with open(specification_path, "r") as spec_file:
            self.parser = SwaggerParser(swagger_yaml=spec_file)
    else:
        self.parser = SwaggerParser(swagger_path=specification_path)

    self._make_resource_definitions()
def __init__(self, path_or_url, verbose=False, loop=None, stream=None):
    """Load a swagger spec and set up a logged client session for it."""
    content = get_content(path_or_url)
    self._content = content
    self._parser = SwaggerParser(swagger_dict=content)
    spec = self._parser.specification
    self.spec = spec
    self.verbose = verbose
    # The host keeps the basePath suffix when the spec declares one.
    host = spec['host']
    if 'basePath' in spec:
        host += spec['basePath']
    self.host = host
    # Default to https when the spec declares no scheme; use the first one.
    self.scheme = spec.get('schemes', ['https'])[0]
    self.running = True
    self._operations = self._get_operations()
    self.session = LoggedClientSession(loop, stream, verbose=verbose)
def swagger_url_patterns(swagger_dict, function_map):
    """Build URL patterns from a swagger dict and a map of view functions."""
    # Init with dictionary
    tree = SwaggerParser(swagger_dict=swagger_dict)
    # _parse_swagger_child fills resource_map in place.
    resource_map = {}
    _parse_swagger_child(tree, resource_map)
    return _generate_swagger_patterns(resource_map, function_map, tree)
def swagger_stub(swagger_files_url):
    """Fixture to stub a microservice from swagger files.

    To use this fixture you need to define a swagger fixture named
    swagger_files_url with the path to your swagger files, and the url to
    stub. Then just add this fixture to your tests and your request pointing
    to the urls in swagger_files_url will be managed by the stub.

    Example:
        @pytest.fixture
        def swagger_files_url():
            return [('tests/swagger.yaml', 'http://localhost:8000')]
    """
    httpretty.enable()

    for i in swagger_files_url:  # Get all given swagger files and url
        base_url = i[1]
        s = SwaggerParser(i[0])
        swagger_url[base_url] = s

        # Register all urls: every verb under base_url/* is answered by
        # get_data_from_request, which consults the parsed spec.
        httpretty.register_uri(
            httpretty.GET, re.compile(base_url + r'/.*'),
            body=get_data_from_request)

        httpretty.register_uri(
            httpretty.POST, re.compile(base_url + r'/.*'),
            body=get_data_from_request)

        httpretty.register_uri(
            httpretty.PUT, re.compile(base_url + r'/.*'),
            body=get_data_from_request)

        httpretty.register_uri(
            httpretty.PATCH, re.compile(base_url + r'/.*'),
            body=get_data_from_request)

        httpretty.register_uri(
            httpretty.DELETE, re.compile(base_url + r'/.*'),
            body=get_data_from_request)

        memory[base_url] = StubMemory(s)

    # NOTE(review): only the memory entry of the *last* (file, url) pair is
    # yielded — assumes a single pair in practice; confirm with callers.
    yield memory[base_url]

    # Close httpretty
    httpretty.disable()
    httpretty.reset()
def test_swagger_view(self):
    """Fetch the published spec and hit every GET endpoint it declares."""
    res = self.testapp.get('/__api__', status=200)
    # make sure it's compliant
    # safe_load: the spec body is plain data; yaml.load without an explicit
    # Loader is unsafe and deprecated in modern PyYAML.
    parser = SwaggerParser(swagger_dict=yaml.safe_load(res.body))
    spec = parser.specification
    self.assertEqual(spec['info']['version'], __version__)
    self.assertEqual(spec['schemes'], ['https'])
    self.assertEqual(spec['host'], 'shavar.stage.mozaws.net')
    # now testing that every GET endpoint is present
    for path, items in spec['paths'].items():
        for verb, options in items.items():
            verb = verb.upper()
            if verb != 'GET':
                continue
            # Any declared response status is acceptable for the probe.
            statuses = [int(st) for st in options['responses'].keys()]
            res = self.testapp.get(path, status=statuses)
def __init__(self, name, import_name, swagger_spec,
             static_folder=None, static_url_path=None,
             template_folder=None, url_prefix=None, subdomain=None,
             url_defaults=None, root_path=None):
    # Forward the standard Flask Blueprint arguments (everything except
    # swagger_spec) to the parent constructor, then parse the swagger spec
    # that this blueprint will serve.
    init = super(SwaggerBlueprint, self).__init__
    init(name, import_name, static_folder, static_url_path,
         template_folder, url_prefix, subdomain, url_defaults,
         root_path)
    self._content = get_content(swagger_spec)
    self._parser = SwaggerParser(swagger_dict=self._content)
    self.spec = self._parser.specification
    self.ops = self._get_operations()
def get_genes_from_file(target_file):
    """Build a {path: {verb: [genes]}} mapping from the swagger spec.

    Integer parameters become NumberGen genes (bounded by the spec's
    minimum/maximum), string parameters become StringGen genes; possible
    values for both come from target_file.

    :param target_file: File of possible gene values, parsed by
        possible_values_parser.
    :return: dict keyed by path, then verb, of gene lists.
    """
    parser = SwaggerParser(swagger_path='../swagger/swagger.json')
    paths = get_paths(parser)
    paths_dict = {}
    dict_poss_numbers, dict_poss_string = possible_values_parser.load_possible_genes_values(
        target_file)
    # Iterating over all paths (path is an structure in the swagger file)
    for path, info in paths.items():
        verbs_dict = {}
        # Iterating over all verbs
        for verb, value in info.items():
            # (Removed a no-op str(verb) statement and a dead local that
            # shadowed the builtin `type`.)
            genes_list = []
            for param in value["parameters"]:
                name = param["name"]
                required = param["required"]
                if param["type"] == 'integer':
                    genes_list.append(NumberGen(name, 0, int(param["minimum"]),
                                                int(param["maximum"]),
                                                dict_poss_numbers))
                if param["type"] == 'string':
                    genes_list.append(StringGen(name, required, "", 3,
                                                dict_poss_string))
            # Adding genes list to the verbs_dict
            verbs_dict[verb] = genes_list
        # Adding verbs dict to the paths_dict
        paths_dict[path] = verbs_dict
    return paths_dict
def wrapper(self, *args, **kwargs):
    # Decorator body: before invoking the wrapped test, look up the swagger
    # operation matching the test's name and attach example request data
    # (body / story_number) to the test instance.
    yaml_file = os.path.join(os.getcwd(), 'swagger_server/swagger/swagger.yaml')
    parsed_file = SwaggerParser(swagger_path=yaml_file)
    # NOTE(review): assumes the wrapped function is named <prefix>_<operationId>
    # so that everything after the first underscore is the operation id.
    method_name = function.__name__.split('_', 1)[-1]
    _uri, _method, _tag = parsed_file.operation[method_name]
    if _method in ['POST', 'post']:
        for each_param in parsed_file.paths[_uri][_method][
                'parameters'].keys():
            if each_param == 'body':
                # Resolve the body schema's $ref and use that definition's
                # example payload as the request body.
                definition = parsed_file.paths[_uri][_method][
                    'parameters'][each_param]['schema']['$ref'].split(
                        '/')[-1]
                body = parsed_file.specification['definitions'][
                    definition]['example']
                setattr(self, 'body', body)
            if each_param == 'story_number':
                # x-example carries the sample story number for the param.
                story_number = parsed_file.paths[_uri][_method][
                    'parameters'][each_param]['x-example']
                setattr(self, 'story_number', story_number)
    response = function(self, *args, **kwargs)
    return response
class Generator(object): def __init__(self, output_dir, module_name=DEFAULT_MODULE, verbose=False, rest_server_url=None, permissions=False): self.parser = None self.module_name = module_name self._resources = None self.verbose = verbose self.output_dir = output_dir self.rest_server_url = rest_server_url self.permissions = permissions def load_specification(self, specification_path, spec_format=None): # If the swagger spec format is not specified explicitly, we try to # derive it from the specification path if not spec_format: filename = os.path.basename(specification_path) extension = filename.rsplit(".", 1)[-1] if extension in YAML_EXTENSIONS: spec_format = SPEC_YAML elif extension in JSON_EXTENSIONS: spec_format = SPEC_JSON else: raise RuntimeError("Could not infer specification format. Use " "--spec-format to specify it explicitly.") click.secho("Using spec format '{}'".format(spec_format), fg="green") if spec_format == SPEC_YAML: with open(specification_path, "r") as f: self.parser = SwaggerParser(swagger_yaml=f) else: self.parser = SwaggerParser(swagger_path=specification_path) self._make_resource_definitions() def _get_definition_from_ref(self, definition): if "$ref" in definition: definition_name = \ self.parser.get_definition_name_from_ref(definition["$ref"]) ref_def = self.parser.specification["definitions"][definition_name] title = definition_name.replace("_", " ").title().replace(" ", "") return ref_def, title else: return definition, None def _get_resource_attributes(self, resource_name, properties, definition, suffix, fields=None): attributes = [] found_reference = False for name, details in properties.items(): # Check for desired fields and continue if not in there. 
if fields is not None: if name not in fields: continue # Handle reference definition _property, title = self._get_definition_from_ref(details) if _property.get("properties", None) is not None: continue attribute = { "source": name, "type": _property.get("type", None), "required": name in definition.get("required", []), "read_only": _property.get("readOnly", False) if suffix == "Input" else False } # Add DisabledInput to Imports if read_only is true. if attribute["read_only"] and "DisabledInput" \ not in self._resources[resource_name]["imports"]: self._resources[resource_name]["imports"].append( "DisabledInput") # Based on the type/format combination get the correct # AOR component to use. related_field = False if attribute["type"] in COMPONENT_MAPPING[suffix]: # Check if it is a related field or not if _property.get("x-related-info", None) is not None: if self.permissions and not found_reference: found_reference = True custom_imports = [ custom["name"] for custom in self._resources[resource_name]["custom_imports"] ] if "EmptyField" not in custom_imports: self._resources[resource_name]["custom_imports"].append( CUSTOM_IMPORTS["empty"] ) related_info = _property["x-related-info"] model = related_info.get("model", False) # Check if related model is not set to None. if model is not None: related_field = True # If model didn't even exist then attempt to guess the model # from the substring before the last "_". if not model: model = name.rsplit("_", 1)[0] attribute["label"] = model.replace("_", " ").title() # If a custom base path has been given set the reference to it # else attempt to get the plural of the given model. if related_info.get("rest_resource_name", None) is not None: reference = related_info["rest_resource_name"] else: reference = words.plural(model.replace("_", "")) attribute["reference"] = reference # Get the option text to be used in the Select input from the # label field, else guess it from the current property name. 
guess = name.rsplit("_", 1)[1] label = related_info.get("label", None) or guess attribute["option_text"] = label elif name.endswith("_id"): related_field = True relation = name.replace("_id", "") attribute["label"] = relation.title() attribute["reference"] = words.plural(relation) attribute["related_field"] = "id" # LongTextFields don't exist # Handle component after figuring out if a related field or not. if not related_field: if _property.get("format", None) in COMPONENT_MAPPING[suffix]: # DateTimeField is currently not supported. if suffix == "Field" and _property["format"] == "date-time": _type = "date" else: _type = _property["format"] attribute["component"] = \ COMPONENT_MAPPING[suffix][_type] else: attribute["component"] = \ COMPONENT_MAPPING[suffix][attribute["type"]] else: attribute["component"] = \ COMPONENT_MAPPING[suffix]["relation"] if suffix != "Input": attribute["related_component"] = \ COMPONENT_MAPPING[suffix][attribute["type"]] else: attribute["related_component"] = "SelectInput" # Handle an enum possibility if _property.get("enum", None) is not None: attribute["component"] = COMPONENT_MAPPING[suffix]["enum"] # Only add choices if an input if suffix == "Input": attribute["choices"] = _property["enum"] if attribute.get("component", None) is not None: # Add component to resource imports if not there. if attribute["component"] not in \ self._resources[resource_name]["imports"] and \ attribute["component"] not in CUSTOM_COMPONENTS: self._resources[resource_name]["imports"].append( attribute["component"] ) # Add related component to resource imports if not there. if attribute.get("related_component", None) is not None: if attribute["related_component"] not in \ self._resources[resource_name]["imports"]: self._resources[resource_name]["imports"].append( attribute["related_component"] ) attributes.append(attribute) # Check for custom import types here. 
_type = "{}-{}".format(attribute["type"], suffix.lower()) _format = "{}-{}".format(_property.get("format", ""), suffix.lower()) if _type in CUSTOM_IMPORTS or _format in CUSTOM_IMPORTS: custom_imports = [ custom["name"] for custom in self._resources[resource_name]["custom_imports"] ] _import = CUSTOM_IMPORTS.get( _format) or CUSTOM_IMPORTS.get(_type) if _import["name"] not in custom_imports: self._resources[resource_name]["custom_imports"].append( _import ) return attributes def _get_resource_from_definition(self, resource_name, head_component, definition, permissions=None): self._resources[resource_name][head_component] = { "permissions": permissions or [] } suffix = COMPONENT_SUFFIX[head_component] properties = definition.get("properties", {}) resource = self._get_resource_attributes( resource_name=resource_name, properties=properties, definition=definition, suffix=suffix ) # Only add if there is something in resource if resource: self._resources[resource_name][head_component]["fields"] = resource # Check if there are inline models for the given resource. inlines = self.parser.specification.get( "x-detail-page-definitions", None ) # Inlines are only shown on the Show and Edit components. if inlines is not None and head_component in ["show", "edit"]: if resource_name in inlines: if self.permissions: custom_imports = [ custom["name"] for custom in self._resources[resource_name]["custom_imports"] ] if "EmptyField" not in custom_imports: self._resources[resource_name]["custom_imports"].append( CUSTOM_IMPORTS["empty"] ) self._resources[resource_name][head_component]["inlines"] = [] inlines = inlines[resource_name]["inlines"] for inline in inlines: model = inline["model"] label = inline.get("label", None) # If a custom base path has been given. 
if inline.get("rest_resource_name", None) is not None: reference = inline["rest_resource_name"] else: reference = words.plural(model.replace("_", "")) fields = inline.get("fields", None) many_field = { "label": label or model.replace("_", " ").title(), "reference": reference, "target": inline["key"], "component": COMPONENT_MAPPING[suffix]["many"] } # Add ReferenceMany component to imports if many_field["component"] not in \ self._resources[resource_name]["imports"]: self._resources[resource_name]["imports"].append( many_field["component"] ) inline_def = \ self.parser.specification["definitions"][inline["model"]] properties = inline_def.get("properties", {}) many_field["fields"] = self._get_resource_attributes( resource_name=resource_name, properties=properties, definition=inline_def, suffix="Field", fields=fields ) self._resources[resource_name][head_component]["inlines"].append( many_field ) def _make_resource_definitions(self): self._resources = {} permission_imports_loaded = False for path, verbs in self.parser.specification["paths"].items(): for verb, io in verbs.items(): # Check if this is not a valid path method then skip it. 
# NOTE(review): this chunk begins mid-method -- the statements below are the
# tail of the per-operation loop of the resource-initialisation method (its
# `def` line and the enclosing `for path ...` / `for verb, io ...` loops are
# outside this view). Indentation here is reconstructed; confirm against the
# full file before merging.
            # "parameters" at path level is a shared-parameter key, not an
            # HTTP verb, so there is no operation to generate for it.
            if verb == "parameters":
                continue
            else:
                operation_id = io.get("operationId", "")
                # Only operations whose id contains one of the known suffixes
                # (create/read/update/delete/list, presumably) are generated.
                valid_operation = any([
                    operation in operation_id
                    for operation in OPERATION_SUFFIXES
                ])
                if operation_id and not valid_operation:
                    continue
                # Get resource name and path and add it to the list
                # for the first occurring instance of the resource
                name = operation_id.split("_")[0]
                if name not in self._resources:
                    permission_imports_loaded = False
                    self._resources[name] = {
                        "path": path[1:].split("/")[0],
                        "imports": [],
                        "custom_imports": [],
                        "has_methods": False,
                        "filter_lengths": {}
                    }
                # Reset per-operation state; `definition`/`head_component`
                # are only set by the verb branches below.
                definition = None
                head_component = None
                permissions = io.get("x-aor-permissions", []) if self.permissions else None
                # Load the permissions helper import once per resource.
                # NOTE(review): `permission_imports_loaded` is only assigned
                # when a NEW resource is seen; it carries over between loop
                # iterations of the same function scope -- verify intent.
                if not permission_imports_loaded and self.permissions:
                    permission_imports_loaded = True
                    self._resources[name]["custom_imports"].append(
                        CUSTOM_IMPORTS["permissions"]
                    )
                # Get the correct definition/head_component/component suffix per
                # verb based on the operation.
                _create = "create" in operation_id
                _update = "update" in operation_id
                if "read" in operation_id:
                    definition, title = self._get_definition_from_ref(
                        definition=io["responses"]["200"]["schema"]
                    )
                    self._resources[name]["title"] = title or name
                    head_component = "show"
                    # Add show component imports
                    if "Show" not in self._resources[name]["imports"]:
                        self._resources[name]["imports"].append("Show")
                        self._resources[name]["imports"].append(
                            "SimpleShowLayout")
                elif "list" in operation_id:
                    definition, title = self._get_definition_from_ref(
                        definition=io["responses"]["200"]["schema"]["items"]
                    )
                    head_component = "list"
                    # Add list component imports
                    if "List" not in self._resources[name]["imports"]:
                        self._resources[name]["imports"].append("List")
                        self._resources[name]["imports"].append("Datagrid")
                    filters = []
                    filter_imports = []
                    # Get all method filters for the list component.
                    for parameter in io.get("parameters", []):
                        # If the parameter is a reference, get the actual parameter.
                        if "$ref" in parameter:
                            ref = parameter["$ref"].split("/")[2]
                            param = self.parser.specification["parameters"][ref]
                        else:
                            param = parameter
                        # Filters are only in the query string and their type needs
                        # to be a supported component.
                        if param["in"] == "query" \
                                and param["type"] in COMPONENT_MAPPING["Input"] \
                                and not param.get("x-admin-on-rest-exclude", False):
                            # Get component based on the explicit declaration or just the type.
                            declared_input = param.get("x-aor-filter", None)
                            related_input = param.get("x-related-info", None)
                            _type = param["type"]
                            relation = None
                            if declared_input:
                                # An explicit x-aor-filter wins; it may also
                                # declare a ranged variant of the input.
                                _range = "-range" if declared_input.get(
                                    "range", False) else ""
                                _type = "{_type}{_range}".format(
                                    _type=declared_input["format"],
                                    _range=_range
                                )
                            elif related_input:
                                # Related info turns the filter into a
                                # relation backed by an enum input component.
                                _type = "relation"
                                relation = {
                                    "component": COMPONENT_MAPPING["Input"]["enum"],
                                    "resource": related_input["rest_resource_name"],
                                    "text": related_input.get("label", None)
                                }
                                if relation["component"] not in filter_imports:
                                    filter_imports.append(relation["component"])
                            component = COMPONENT_MAPPING["Input"][_type]
                            # Add props if needed.
                            props = None
                            if _type in PROPS_MAPPING["Input"]:
                                props = PROPS_MAPPING["Input"][_type]
                            # Add component to filter imports if not there.
                            if component not in filter_imports:
                                filter_imports.append(component)
                            source = param["name"]
                            label = source.replace("_", " ").title()
                            # Record min/max length constraints for the field.
                            _min = param.get("minLength", None)
                            _max = param.get("maxLength", None)
                            if _min or _max:
                                self._resources[name]["filter_lengths"][source] = {
                                    "min_length": _min,
                                    "max_length": _max
                                }
                            # Handle Array filter types finally!
                            array_validation = param["items"]["type"] \
                                if _type == "array" else None
                            filters.append({
                                "source": source,
                                "label": label,
                                "title": label.replace(" ", ""),
                                "component": component,
                                "relation": relation,
                                "props": props,
                                "array": array_validation
                            })
                    if filters:
                        self._resources[name]["filters"] = {
                            "filters": filters,
                            "imports": filter_imports
                        }
                elif _create or _update:
                    for parameter in io.get("parameters", []):
                        # If the parameter is a reference, get the actual parameter.
                        if "$ref" in parameter:
                            ref = parameter["$ref"].split("/")[2]
                            param = self.parser.specification["parameters"][ref]
                        else:
                            param = parameter
                        # Grab the body parameter as the create definition
                        if param["in"] == "body":
                            definition, title = self._get_definition_from_ref(
                                definition=param["schema"]
                            )
                    head_component = "create" if _create else "edit"
                    # Add SimpleForm and the head component to the imports
                    if "SimpleForm" not in self._resources[name]["imports"]:
                        self._resources[name]["imports"].append("SimpleForm")
                    the_import = head_component.title()
                    if the_import not in self._resources[name]["imports"]:
                        self._resources[name]["imports"].append(the_import)
                elif "delete" in operation_id:
                    self._resources[name]["remove"] = {
                        "permissions": permissions
                    }
                if head_component and definition:
                    # Toggle to be included in AOR if it has a single method.
                    self._resources[name]["has_methods"] = True
                    self._get_resource_from_definition(
                        resource_name=name,
                        head_component=head_component,
                        definition=definition,
                        permissions=permissions
                    )

    @staticmethod
    def generate_js_file(filename, context):
        """
        Generate a js file from the given specification.

        :param filename: The name of the template file.
        :param context: Context to be passed.
        :return: str
        """
        return render_to_string(filename, context)

    @staticmethod
    def add_additional_file(filename):
        """
        Add an additional file, that does not require context,
        to the generated admin.

        :return: str
        """
        return render_to_string(filename, {})

    def aor_generation(self):
        """
        Render every admin-on-rest artifact into ``self.output_dir``:
        App.js, Menu.js, per-resource components, per-resource filters,
        the swagger rest-server shim, optional permissions store, and
        the static ADDITIONAL_FILES. Each write is echoed to stdout
        when ``self.verbose`` is set.
        """
        click.secho("Generating App.js component file...", fg="green")
        with open(os.path.join(self.output_dir, "App.js"), "w") as f:
            data = self.generate_js_file(
                filename="App.js",
                context={
                    "title": self.module_name,
                    "rest_server_url": self.rest_server_url,
                    "resources": self._resources,
                    "supported_components": SUPPORTED_COMPONENTS,
                    "add_permissions": self.permissions
                })
            f.write(data)
            if self.verbose:
                print(data)
        click.secho("Generating Menu.js component file...", fg="green")
        with open(os.path.join(self.output_dir, "Menu.js"), "w") as f:
            data = self.generate_js_file(
                filename="Menu.js",
                context={
                    "resources": self._resources
                })
            f.write(data)
            if self.verbose:
                print(data)
        click.secho("Generating resource component files...", fg="blue")
        resource_dir = self.output_dir + "/resources"
        if not os.path.exists(resource_dir):
            os.makedirs(resource_dir)
        for name, resource in self._resources.items():
            # Resources without a title never got a "read" definition, so
            # there is nothing to render for them.
            title = resource.get("title", None)
            if title:
                click.secho("Generating {}.js file...".format(
                    title), fg="green")
                with open(os.path.join(resource_dir,
                                       "{}.js".format(title)), "w") as f:
                    data = self.generate_js_file(
                        filename="Resource.js",
                        context={
                            "name": title,
                            "resource": resource,
                            "supported_components": SUPPORTED_COMPONENTS,
                            "add_permissions": self.permissions
                        }
                    )
                    f.write(data)
                    if self.verbose:
                        print(data)
        click.secho("Generating Filter files for resources...", fg="blue")
        filter_dir = self.output_dir + "/filters"
        if not os.path.exists(filter_dir):
            os.makedirs(filter_dir)
        for name, resource in self._resources.items():
            if resource.get("filters", None) is not None:
                title = resource.get("title", None)
                if title:
                    click.secho("Generating {}Filter.js file...".format(
                        title), fg="green")
                    with open(os.path.join(filter_dir,
                                           "{}Filter.js".format(title)),
                              "w") as f:
                        data = self.generate_js_file(
                            filename="Filters.js",
                            context={
                                "title": title,
                                "filters": resource["filters"]
                            })
                        f.write(data)
                        if self.verbose:
                            print(data)
        click.secho("Adding basic swagger rest server file...", fg="cyan")
        with open(os.path.join(self.output_dir,
                               "swaggerRestServer.js"), "w") as f:
            data = self.generate_js_file(
                filename="swaggerRestServer.js",
                context={
                    "resources": self._resources
                }
            )
            f.write(data)
            if self.verbose:
                print(data)
        if self.permissions:
            path_dir = self.output_dir + "/auth"
            if not os.path.exists(path_dir):
                os.makedirs(path_dir)
            with open(os.path.join(path_dir,
                                   "PermissionsStore.js"), "w") as f:
                data = self.generate_js_file(
                    filename="PermissionsStore.js",
                    context={
                        "resources": self._resources,
                        "supported_components": SUPPORTED_COMPONENTS
                    }
                )
                f.write(data)
                if self.verbose:
                    print(data)
        # Generate additional Files
        for _dir, files in ADDITIONAL_FILES.items():
            # Non-"root" groups go into a subdirectory named after the group.
            if _dir != "root":
                path_dir = "{}/{}".format(self.output_dir, _dir)
                if not os.path.exists(path_dir):
                    os.makedirs(path_dir)
            else:
                path_dir = self.output_dir
            for file in files:
                click.secho("Adding {} file...".format(file), fg="cyan")
                with open(os.path.join(path_dir, file), "w") as f:
                    data = self.add_additional_file(file)
                    f.write(data)
                    if self.verbose:
                        print(data)
def swagger_parser(self):
    """Construct a ``SwaggerParser`` for this instance's spec file.

    Loads the specification at ``self.swagger_path`` into a dict and
    hands it to the parser.

    :return: a ``SwaggerParser`` built from the loaded spec dict.
    """
    spec_dict = load_dict_from_path(self.swagger_path)
    return SwaggerParser(swagger_dict=spec_dict)
def swagger_parser():
    """Return a parser for the test swagger specification file."""
    parser = SwaggerParser('tests/swagger.yaml')
    return parser
def swagger_file_parser(request):
    """Return a parser built from the parametrized value.

    ``request.param`` is passed straight through to ``SwaggerParser``
    (presumably a spec file path -- confirm against the parametrize list).
    """
    spec_source = request.param
    return SwaggerParser(spec_source)
def swagger_array_parser():
    """Return a parser for the array-types test specification."""
    array_parser = SwaggerParser('tests/swagger_arrays.yaml')
    return array_parser
def inline_parser():
    """Return a parser for the inline-definitions test specification."""
    parser = SwaggerParser('tests/inline.yaml')
    return parser
def swagger_test_yield(swagger_yaml_path=None, app_url=None,
                       authorize_error=None, wait_between_test=False,
                       use_example=True):
    """Test the given swagger api. Yield the action and operation done for each test.

    Test with either a swagger.yaml path for a connexion app or with an API
    URL if you have a running API.

    Args:
        swagger_yaml_path: path of your YAML swagger file.
        app_url: URL of the swagger api.
        authorize_error: dict containing the error you don't want to raise.
                         ex: {
                            'get': {
                                '/pet/': ['404']
                            }
                         }
                         Will ignore 404 when getting a pet.
        wait_between_test: wait between tests (useful if you use Elasticsearch).
        use_example: use example of your swagger file instead of generated data.

    Returns:
        Yield between each test: (action, operation)

    Raises:
        ValueError: In case you specify neither a swagger.yaml path or an app URL.
    """
    if authorize_error is None:
        authorize_error = {}

    # Init test: either spin up a local connexion app from the spec file, or
    # hit a live API via the requests module.
    if swagger_yaml_path is not None:
        app = connexion.App(__name__, port=8080, debug=True,
                            specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path)))
        app.add_api(os.path.basename(swagger_yaml_path))
        app_client = app.app.test_client()
        swagger_parser = SwaggerParser(swagger_yaml_path, use_example=use_example)
    elif app_url is not None:
        app_client = requests
        # Fetch the spec from the running API itself.
        swagger_parser = SwaggerParser(swagger_dict=requests.get(u'{0}/swagger.json'.format(app_url)).json(),
                                       use_example=False)
    else:
        raise ValueError('You must either specify a swagger.yaml path or an app url')

    operation_sorted = {'post': [], 'get': [], 'put': [], 'patch': [], 'delete': []}

    # Sort operation by action
    for operation, request in swagger_parser.operation.items():
        operation_sorted[request[1]].append((operation, request))

    postponed = []

    # For every operationId
    for action in ['post', 'get', 'put', 'patch', 'delete']:
        for operation in operation_sorted[action]:
            # Make request
            path = operation[1][0]
            action = operation[1][1]

            request_args = get_request_args(path, action, swagger_parser)
            url, body, headers, files = get_url_body_from_request(action, path, request_args, swagger_parser)

            logger.info(u'TESTING {0} {1}'.format(action.upper(), url))

            if swagger_yaml_path is not None:
                response = get_method_from_action(app_client, action)(url, headers=headers, data=body)
            else:
                # Strip the base path: the spec-relative url is appended to
                # the raw app_url.
                response = get_method_from_action(app_client, action)(
                    u'{0}{1}'.format(app_url.replace(swagger_parser.base_path, ''), url),
                    headers=dict(headers), data=body, files=files)

            logger.info(u'Got status code: {0}'.format(response.status_code))

            # Check if authorize error: status codes whitelisted per
            # action/path are yielded without validation.
            if (action in authorize_error and path in authorize_error[action]
                    and response.status_code in authorize_error[action][path]):
                logger.info(u'Got authorized error on {0} with status {1}'.format(url, response.status_code))
                yield (action, operation)
                continue

            if not response.status_code == 404:
                # Get valid request and response body
                body_req = swagger_parser.get_send_request_correct_body(path, action)

                try:
                    response_spec = swagger_parser.get_request_data(path, action, body_req)
                except (TypeError, ValueError) as exc:
                    logger.warning(u'Error in the swagger file: {0}'.format(repr(exc)))
                    continue

                # Get response data (requests exposes .content, the flask
                # test client exposes .data)
                if hasattr(response, 'content'):
                    response_text = response.content
                else:
                    response_text = response.data

                # Convert to str
                if hasattr(response_text, 'decode'):
                    response_text = response_text.decode('utf-8')

                # Get json; fall back to the raw text when the body is not JSON.
                try:
                    response_json = json.loads(response_text)
                except ValueError:
                    response_json = response_text

                assert response.status_code < 400

                # Validate the response against the spec'd definition for this
                # status code (or the 'default' response).
                if response.status_code in response_spec.keys():
                    validate_definition(swagger_parser, response_spec[response.status_code], response_json)
                elif 'default' in response_spec.keys():
                    validate_definition(swagger_parser, response_spec['default'], response_json)
                else:
                    raise AssertionError('Invalid status code {0}. Expected: {1}'.format(
                        response.status_code, response_spec.keys()))

                if wait_between_test:  # Wait
                    time.sleep(2)
                yield (action, operation)
            else:
                # 404 => Postpone retry. Appending to operation_sorted[action]
                # while iterating it is intentional: the operation is retried
                # once at the end of the current verb's queue.
                if {'action': action, 'operation': operation} in postponed:
                    # Already postponed => raise error
                    raise Exception(u'Invalid status code {0}'.format(response.status_code))
                operation_sorted[action].append(operation)
                postponed.append({'action': action, 'operation': operation})
                yield (action, operation)
                continue
class SwaggerAggregator(object):
    """Create an API from an aggregation of API."""

    def __init__(self, config_file, *args):
        """Init the aggregation.

        Extra args will be used to replace args in the config file.

        Args:
            config_file: aggregation config.
        """
        self.config_file = config_file
        self.swagger_args = args
        self.errors = []
        self.swagger_apis = {}

        # Get config
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # and unsafe on untrusted input -- consider yaml.safe_load if the
        # config never uses python-specific tags.
        with open(self.config_file, "r") as f:
            self.yaml_file = yaml.load(f.read())

        self.get_args()

    def get_args(self):
        """Get args of the config file.

        Returns
            Dict of arg name : arg value
        """
        self.args_dict = {}
        if "args" in self.yaml_file:  # Check if args category is in the config
            # Get a list of args (comma separated, whitespace stripped)
            args_name = [i.replace(" ", "") for i in self.yaml_file["args"].split(",")]

            # Associate each arg name with a args given in the init.
            index = 0
            for arg_name in args_name:
                self.args_dict[arg_name] = self.swagger_args[index]
                index += 1
        return self.args_dict

    def parse_value(self, value):
        """Replace in the value all args.

        For example if you have an arg 'toto': 'test' and a value:
        'http://toto.com'. The function will return 'http://test.com'

        Args:
            value: the str to parse.

        Returns:
            Parsed value.
        """
        if isinstance(value, (six.text_type, six.string_types)):
            for key in self.args_dict.keys():
                value = value.replace(key, self.args_dict[key])
        return value

    def get_swagger_from_url(self, api_url):
        """Get the swagger file of the microservice at the given url.

        Args:
            api_url: url of the microservice.
        """
        return requests.get("{0}/swagger.json".format(self.parse_value(api_url))).json()

    def get_aggregate_swagger(self):
        """Get swagger files associated with the aggregates.

        Returns:
            A dict of swagger spec.
        """
        if "apis" in self.yaml_file:  # Check if apis is in the config file
            for api_name, api_url in self.yaml_file["apis"].items():
                if api_name not in self.swagger_apis:
                    # Get the swagger.json
                    try:
                        self.swagger_apis[api_name] = {
                            "spec": self.get_swagger_from_url(api_url),
                            "url": self.parse_value(api_url),
                        }
                        # Fetch succeeded: clear any earlier failure record.
                        # Raises ValueError when the url was never recorded;
                        # handled by the except ValueError below.
                        self.errors.remove(api_url)
                    except (ConnectionError, JSONDecodeError):
                        # NOTE(review): JSONDecodeError is a ValueError
                        # subclass -- clause order matters here.
                        if api_url not in self.errors:
                            self.errors.append(api_url)
                            logger.warning(u"Cannot get swagger from {0}".format(api_url))
                    except ValueError:
                        logger.info(u"Cannot remove {0} from errors".format(api_url))
        return self.swagger_apis

    def exclude_paths(self, swagger):
        """Exclude path in the given swagger.

        Path to exclude are definded in the exclude_paths section
        of the config file.

        Args:
            swagger: dict of swagger spec.

        Returns:
            Swagger spec without the excluded paths.
        """
        # Get exclude_paths, each entry formatted as "<VERB> <path>".
        path_exclude = {p.split(" ")[1]: [p.split(" ")[0].lower()]
                        for p in self.yaml_file.get("exclude_paths", [])}

        # Remove excluded paths (iterate the original, delete from the copy)
        swagger_filtered = deepcopy(swagger)
        for path, path_spec in swagger["paths"].items():
            if path in path_exclude.keys():
                for action, _ in path_spec.items():
                    if action in path_exclude[path]:
                        del swagger_filtered["paths"][path][action]
        return swagger_filtered

    def merge_aggregates(self, swagger):
        """Merge aggregates.

        Args:
            swagger: swagger spec to merge apis in.

        Returns:
            Aggregate of all apis.
        """
        swagger_apis = deepcopy(self.get_aggregate_swagger())
        for api, api_spec in swagger_apis.items():
            # Rename definition to avoid collision: every $ref gets the api
            # name prefixed via a serialize/replace/deserialize round-trip.
            api_spec["spec"] = json.loads(
                json.dumps(api_spec["spec"]).replace("#/definitions/",
                                                     u"#/definitions/{0}".format(api))
            )
            if "definitions" in api_spec["spec"]:
                for definition_name, definition_spec in api_spec["spec"]["definitions"].items():
                    if not definition_name.startswith(api):
                        swagger["definitions"][u"{0}{1}".format(api, definition_name)] = definition_spec
                    else:
                        swagger["definitions"][definition_name] = definition_spec
            if "paths" in api_spec["spec"]:
                swagger["paths"].update(deepcopy(api_spec["spec"]["paths"]))

    def generate_swagger_json(self):
        """Generate a swagger from all the apis swagger."""
        # Base swagger
        base_swagger = {
            "swagger": "2.0",
            "info": self.yaml_file.get("info"),
            "basePath": self.yaml_file.get("basePath"),
            "definitions": {},
            "paths": {},
        }

        # Merge aggregates
        self.merge_aggregates(base_swagger)
        base_swagger = self.exclude_paths(base_swagger)

        # Change operation id: every (path, action) pair gets a generated
        # proxy function exported at module level so connexion can route
        # to it via operationId.
        spec = {}
        uri = {}
        path_list = {}
        action_list = {}
        current_module = sys.modules[__name__]
        for path, path_spec in base_swagger["paths"].items():
            for action, action_spec in path_spec.items():
                # Generate function name and get spec and api url for the path
                func_name = uuid()
                path_list[func_name] = path
                action_list[func_name] = action
                spec[func_name], uri[func_name] = self.get_spec_from_uri(path, action)

                # Export generated function to a module level function
                setattr(
                    current_module,
                    func_name,
                    self.generate_operation_id_function(spec, uri, path_list, action_list, func_name),
                )

                # Set operationId
                action_spec["operationId"] = "swagger_aggregator.{0}".format(func_name)

        self.swagger_parser = SwaggerParser(swagger_dict=deepcopy(base_swagger))

        # Remove exclude_fields from swagger
        # NOTE(review): assumes every affected definition has a "required"
        # key -- a definition without one would raise KeyError. Confirm.
        for definition_name, definition_spec in base_swagger["definitions"].items():
            if definition_name in self.yaml_file.get("exclude_fields", {}):
                for key in self.yaml_file["exclude_fields"][definition_name]:
                    if key in definition_spec["required"]:
                        definition_spec["required"].remove(key)
                    if key in definition_spec["properties"]:
                        del definition_spec["properties"][key]

        # Write swagger.yaml next to the config file
        with open(os.path.join(os.path.dirname(os.path.realpath(self.config_file)),
                               "swagger.yaml"), "w") as f:
            f.write(yaml.dump(base_swagger, default_flow_style=False))

    def filter_definition(self, doc):
        """Filter the definition in the given doc.

        Args:
            doc: doc to filter.

        Returns:
            A filtered doc.
        """
        if isinstance(doc, dict):  # Filter dict
            doc_definition = self.swagger_parser.get_dict_definition(doc)

            # Get keys to remove
            keys_to_remove = self.yaml_file.get("exclude_fields", {}).get(doc_definition, [])

            # Remove keys (mutates doc in place)
            for key in keys_to_remove:
                del doc[key]

            # Filter sub definition recursively
            for k, v in doc.items():
                doc[k] = self.filter_definition(v)
            return doc
        elif isinstance(doc, list):  # List => filter every item
            for index, value in enumerate(doc):
                doc[index] = self.filter_definition(value)
            return doc
        else:
            return doc

    def generate_operation_id_function(self, spec, uri, path, action, func_name):
        """Generate a function to handle the current path.

        Args:
            spec: spec of the action the generated function should handle.
            uri: uri of the microservice corresponding to the spec.
            func_name: name the generated function should have.

        Returns:
            A function with func_name as name.
        """
        @retry_http
        def func(*args, **kwargs):
            """Handle a flask request for the current action.
            """
            # Get url from spec and flask query; path params ({name}) are
            # substituted from the view kwargs.
            url = u"{0}{1}?{2}".format(uri[func.__name__], path[func.__name__],
                                       flask.request.query_string)
            p = re.compile("{(.+)}")
            for path_param in re.findall(p, url):
                for k, v in kwargs.items():
                    if k == path_param:
                        url = url.replace("{{{0}}}".format(k), str(v))

            requests_meth = getattr(requests, action[func.__name__])
            # Forward only non-empty headers.
            headers = {k: v for k, v in dict(flask.request.headers).items() if v}
            if not flask.request.headers.get("Content-Type", "").startswith("multipart/form-data"):
                req = requests_meth(url, data=flask.request.data, headers=headers)
            else:
                # Remove Content-Length because it cause error on nginx side
                if "Content-Length" in headers:
                    headers["X-Content-Length"] = headers["Content-Length"]
                    del headers["Content-Length"]
                # Stream the multipart body through untouched.
                req = requests_meth(url, data=flask.request.stream, headers=headers)

            # Relay the (filtered) upstream response and status code.
            try:
                return (self.filter_definition(req.json()), req.status_code)
            except JSONDecodeError:
                return (req.text, req.status_code)

        func.__name__ = func_name
        return func

    def get_spec_from_uri(self, url, action):
        """Get spec from an path uri and an action.

        Args:
            url: url of the action.
            action: http action.

        Returns:
            (path spec, microservice url)
        """
        # NOTE(review): returns None implicitly when no api declares the
        # path -- callers unpack two values, so a miss would raise TypeError.
        for api, api_spec in self.swagger_apis.items():
            for path_name, path_spec in api_spec["spec"]["paths"].items():
                if path_name == url:
                    return path_spec[action], api_spec["url"]
def swagger_test_yield(swagger_yaml_path=None, app_url=None,
                       authorize_error=None, wait_time_between_tests=0,
                       use_example=True, dry_run=False):
    """Test the given swagger api. Yield the action and operation done for each test.

    Test with either a swagger.yaml path for a connexion app or with an API
    URL if you have a running API.

    Args:
        swagger_yaml_path: path of your YAML swagger file.
        app_url: URL of the swagger api.
        authorize_error: dict containing the error you don't want to raise.
                         ex: {
                            'get': {
                                '/pet/': ['404']
                            }
                         }
                         Will ignore 404 when getting a pet.
        wait_time_between_tests: an number that will be used as waiting time
                                 between tests [in seconds].
        use_example: use example of your swagger file instead of generated data.
        dry_run: don't actually execute the test, only show what would be sent

    Returns:
        Yield between each test: (action, operation)

    Raises:
        ValueError: In case you specify neither a swagger.yaml path or an app URL.
    """
    if authorize_error is None:
        authorize_error = {}

    # Init test: with BOTH a spec path and an app url, parse the local spec
    # but hit the remote API; otherwise fall back to a local connexion app
    # or a spec fetched from the running API.
    if swagger_yaml_path is not None and app_url is not None:
        app_client = requests
        swagger_parser = SwaggerParser(swagger_yaml_path, use_example=use_example)
    elif swagger_yaml_path is not None:
        specification_dir = os.path.dirname(os.path.realpath(swagger_yaml_path))
        app = connexion.App(__name__, port=8080, debug=True,
                            specification_dir=specification_dir)
        app.add_api(os.path.basename(swagger_yaml_path))
        app_client = app.app.test_client()
        swagger_parser = SwaggerParser(swagger_yaml_path, use_example=use_example)
    elif app_url is not None:
        app_client = requests
        remote_swagger_def = requests.get(u'{0}/swagger.json'.format(app_url)).json()
        swagger_parser = SwaggerParser(swagger_dict=remote_swagger_def, use_example=use_example)
    else:
        raise ValueError('You must either specify a swagger.yaml path or an app url')

    print("Starting testrun against {0} or {1} using examples: "
          "{2}".format(swagger_yaml_path, app_url, use_example))

    operation_sorted = {method: [] for method in _HTTP_METHODS}

    # Sort operation by action
    operations = swagger_parser.operation.copy()
    operations.update(swagger_parser.generated_operation)
    for operation, request in operations.items():
        operation_sorted[request[1]].append((operation, request))

    postponed = []

    # For every operationId
    for action in _HTTP_METHODS:
        for operation in operation_sorted[action]:
            # Make request
            path = operation[1][0]
            action = operation[1][1]

            client_name = getattr(app_client, '__name__', 'FlaskClient')
            request_args = get_request_args(path, action, swagger_parser)
            url, body, headers, files = get_url_body_from_request(action, path, request_args, swagger_parser)

            logger.info(u'TESTING {0} {1}'.format(action.upper(), url))

            # Only the pure-local setup (spec path, no app url) goes through
            # the flask test client; everything else uses requests.
            if swagger_yaml_path is not None and app_url is None:
                if dry_run:
                    # Lazy %-style logger args avoid formatting when the
                    # level is disabled.
                    logger.info("\nWould send %s to %s with body %s and headers %s",
                                action.upper(), url, body, headers)
                    continue
                response = get_method_from_action(app_client, action)(url, headers=headers, data=body)
            else:
                full_path = u'{0}{1}'.format(app_url.replace(swagger_parser.base_path, ''), url)
                if dry_run:
                    logger.info("\nWould send %s to %s with body %s and headers %s",
                                action.upper(), full_path, body, headers)
                    continue
                response = get_method_from_action(app_client, action)(full_path,
                                                                      headers=dict(headers),
                                                                      data=body,
                                                                      files=files)

            logger.info(u'Using {0}, got status code {1} for ********** {2} {3}'.format(
                client_name, response.status_code, action.upper(), url))

            # Check if authorize error: whitelisted status codes are yielded
            # without validation.
            if (action in authorize_error and path in authorize_error[action]
                    and response.status_code in authorize_error[action][path]):
                logger.info(u'Got expected authorized error on {0} with status {1}'.format(
                    url, response.status_code))
                yield (action, operation)
                continue

            # BUG FIX: was `response.status_code is not 404` -- identity
            # comparison against an int literal is implementation-dependent
            # (and a SyntaxWarning on Python >= 3.8); use equality.
            if response.status_code != 404:
                # Get valid request and response body
                body_req = swagger_parser.get_send_request_correct_body(path, action)

                try:
                    response_spec = swagger_parser.get_request_data(path, action, body_req)
                except (TypeError, ValueError) as exc:
                    logger.warning(u'Error in the swagger file: {0}'.format(repr(exc)))
                    continue

                # Get response data (requests exposes .content, the flask
                # test client exposes .data)
                if hasattr(response, 'content'):
                    response_text = response.content
                else:
                    response_text = response.data

                # Convert to str
                if hasattr(response_text, 'decode'):
                    response_text = response_text.decode('utf-8')

                # Get json; fall back to raw text when the body is not JSON.
                try:
                    response_json = json.loads(response_text)
                except ValueError:
                    response_json = response_text

                # Validate against the spec'd definition for this status code
                # (or the 'default' response).
                if response.status_code in response_spec.keys():
                    validate_definition(swagger_parser, response_spec[response.status_code], response_json)
                elif 'default' in response_spec.keys():
                    validate_definition(swagger_parser, response_spec['default'], response_json)
                else:
                    raise AssertionError('Invalid status code {0}. Expected: {1}'.format(
                        response.status_code, response_spec.keys()))

                if wait_time_between_tests > 0:
                    time.sleep(wait_time_between_tests)

                yield (action, operation)
            else:
                # 404 => Postpone retry: re-queue once at the end of this
                # verb's list (appending while iterating is intentional).
                if {'action': action, 'operation': operation} in postponed:
                    # Already postponed => raise error
                    raise Exception(u'Invalid status code {0}'.format(response.status_code))

                operation_sorted[action].append(operation)
                postponed.append({'action': action, 'operation': operation})
                yield (action, operation)
                continue
def __init__(self, swaggerFile, apiCaller, basepath=None):
    """Wire this wrapper to a parsed swagger spec and an API caller.

    :param swaggerFile: path of the swagger specification to parse.
    :param apiCaller: object used to perform the actual API calls.
    :param basepath: optional override for the base path; when omitted the
        base path declared in the specification is used.
    """
    self._parser = SwaggerParser(swagger_path=swaggerFile)
    self._apiCaller = apiCaller
    # Fall back to the spec's own base path unless one was supplied.
    self._basepath = basepath if basepath is not None else self._parser.base_path
def swagger_test_yield(
    swagger_yaml_path=None,
    app_url=None,
    authorize_error=None,
    wait_between_test=False,
    use_example=True
):
    """Test the given swagger api. Yield the action and operation done for each test.

    Test with either a swagger.yaml path for a connexion app or with an API
    URL if you have a running API.

    Args:
        swagger_yaml_path: path of your YAML swagger file.
        app_url: URL of the swagger api.
        authorize_error: dict containing the error you don't want to raise.
                         ex: {
                            'get': {
                                '/pet/': ['404']
                            }
                         }
                         Will ignore 404 when getting a pet.
        wait_between_test: wait between tests (useful if you use Elasticsearch).
        use_example: use example of your swagger file instead of generated data.

    Returns:
        Yield between each test: (action, operation)

    Raises:
        ValueError: In case you specify neither a swagger.yaml path or an app URL.
    """
    if authorize_error is None:
        authorize_error = {}

    # Init test: local connexion app from the spec file, or a live API
    # reached through the requests module.
    if swagger_yaml_path is not None:
        app = connexion.App(
            __name__,
            port=8080,
            debug=True,
            specification_dir=os.path.dirname(os.path.realpath(swagger_yaml_path))
        )
        app.add_api(os.path.basename(swagger_yaml_path))
        app_client = app.app.test_client()
        swagger_parser = SwaggerParser(swagger_yaml_path, use_example=use_example)
    elif app_url is not None:
        app_client = requests
        # Fetch the spec from the running API itself.
        swagger_parser = SwaggerParser(
            swagger_dict=requests.get(u"{0}/swagger.json".format(app_url)).json(),
            use_example=False
        )
    else:
        raise ValueError("You must either specify a swagger.yaml path or an app url")

    operation_sorted = {"post": [], "get": [], "put": [], "patch": [], "delete": []}

    # Sort operation by action
    for operation, request in swagger_parser.operation.items():
        operation_sorted[request[1]].append((operation, request))

    postponed = []

    # For every operationId
    for action in ["post", "get", "put", "patch", "delete"]:
        for operation in operation_sorted[action]:
            # Make request
            path = operation[1][0]
            action = operation[1][1]

            request_args = get_request_args(path, action, swagger_parser)
            url, body, headers = get_url_body_from_request(action, path, request_args, swagger_parser)

            if swagger_yaml_path is not None:
                response = get_method_from_action(app_client, action)(url, headers=headers, data=body)
            else:
                # Strip the base path: the spec-relative url is appended to
                # the raw app_url.
                response = get_method_from_action(app_client, action)(
                    u"{0}{1}".format(app_url.replace(swagger_parser.base_path, ""), url),
                    headers=dict(headers),
                    data=body,
                )

            logger.info(u"TESTING {0} {1}: {2}".format(action.upper(), url, response.status_code))

            # Check if authorize error. NOTE(review): this variant keys the
            # whitelist on the full `url`, not the spec `path` as the other
            # variants do -- confirm which is intended.
            if (
                action in authorize_error
                and url in authorize_error[action]
                and response.status_code in authorize_error[action][url]
            ):
                yield (action, operation)
                continue

            if not response.status_code == 404:
                # Get valid request and response body
                body_req = swagger_parser.get_send_request_correct_body(path, action)
                response_spec = swagger_parser.get_request_data(path, action, body_req)

                # Get response data (requests exposes .content, the flask
                # test client exposes .data)
                if hasattr(response, "content"):
                    response_text = response.content
                else:
                    response_text = response.data

                # Get json; a non-JSON or already-str body yields None.
                try:
                    response_json = json.loads(response_text.decode("utf-8"))
                except (ValueError, AttributeError):
                    response_json = None

                # The status code must be declared in the spec and be a success.
                assert response.status_code in response_spec.keys()
                assert response.status_code < 400
                validate_definition(swagger_parser, response_spec[response.status_code], response_json)

                if wait_between_test:  # Wait
                    time.sleep(2)
                yield (action, operation)
            else:
                # 404 => Postpone retry: re-queue once at the end of this
                # verb's list (appending while iterating is intentional).
                if {"action": action, "operation": operation} in postponed:
                    # Already postponed => raise error
                    raise Exception(u"Invalid status code {0}".format(response.status_code))
                operation_sorted[action].append(operation)
                postponed.append({"action": action, "operation": operation})
                yield (action, operation)
                continue
# -*- coding: utf-8 -*-
from swagger_parser import SwaggerParser

# Demo: parse the public petstore specification straight from its URL and
# show the resulting parser object.
petstore_parser = SwaggerParser(swagger_path='https://petstore.swagger.io/v2/swagger.json')
print(petstore_parser)
def swagger_test_yield(swagger_yaml_path=None, app_url=None,
                       authorize_error=None, wait_between_test=False,
                       use_example=True):
    """Drive generated requests against a swagger API, yielding per test.

    Test either from a local spec file or from a running API's URL; each
    executed test is recorded into an excel report written at the end.

    Args:
        swagger_yaml_path: path of your YAML swagger file.
        app_url: URL of the swagger api (spec fetched from /swagger.json).
        authorize_error: dict of status codes to ignore per action/path.
        wait_between_test: wait between tests (useful with Elasticsearch).
        use_example: use example of your swagger file instead of generated data.

    Yields:
        (action, operation) after each test.

    Raises:
        ValueError: when neither a swagger.yaml path nor an app url is given.
    """
    if authorize_error is None:
        authorize_error = {}

    # Local spec-file case
    if swagger_yaml_path is not None:
        app_client = requests
        # BUG FIX: previously hard-coded use_example=True, ignoring the
        # function parameter; pass it through (default is still True).
        swagger_parser = SwaggerParser(swagger_path=swagger_yaml_path,
                                       use_example=use_example)
    # Remote-url case
    elif app_url is not None:
        # Use the default client provided by requests.
        app_client = requests
        # Only the part of the url before /swagger.json is needed.
        swagger_parser = SwaggerParser(
            swagger_dict=requests.get(u'{0}/swagger.json'.format(app_url)).json(),
            use_example=use_example)
        swagger_parser.definitions_example.get('Pet')  # debug probe; result unused
    else:
        raise ValueError('You must either specify a swagger.yaml path or an app url')

    operation_sorted = {'post': [], 'get': [], 'put': [], 'patch': [], 'delete': []}

    # Sort the operations. `swagger_parser.operation` is populated only for
    # operations that declare an operationId, so check first; specs that mix
    # operations with and without ids are not handled.
    if len(swagger_parser.operation.items()) > 0:
        flag = 0
        for operation, request in swagger_parser.operation.items():
            operation_sorted[request[1]].append((operation, request))
    else:
        flag = 1
        operation_sorted = get_action_from_path(swagger_parser)

    postponed = []
    # Number of APIs tested so far.
    test_no = 0
    # Collect per-test info for the excel report ('inconsisdency' [sic] is
    # kept to preserve the report format).
    excel_headers = ('No.', 'path', 'action', 'status_code', 'inconsisdency', 'error_info')
    excel_dataset = tablib.Dataset()
    excel_dataset.headers = excel_headers

    # Test each HTTP verb in the order above.
    for action in ['post', 'get', 'put', 'patch', 'delete']:
        for operation in operation_sorted[action]:
            if flag == 0:
                # operation_sorted entries: key=operationId, value=(path, action, tag)
                path = operation[1][0]
                action = operation[1][1]
            if flag == 1:
                # entries stored as key=path, value=action
                path = operation[0]
                action = operation[1]

            # Generate the request arguments from the spec's examples.
            request_args = get_args_from_example(path, action, swagger_parser)
            # Build the url, body, headers and files for the request.
            url, body, headers, files = get_url_body_from_request(action, path, request_args, swagger_parser)

            logger.info(u'TESTING {0} {1}'.format(action.upper(), url))
            print(u'TESTING {0} {1}'.format(action.upper(), url))

            # Both client flavours take the same arguments here.
            # TODO(review): the scheme is hard-coded to https; should come
            # from swagger_parser.schemes.
            if swagger_yaml_path is not None:
                my_url = u'{0}{1}'.format('https://' + swagger_parser.host, url)
                response = get_method_from_action(app_client, action)(
                    my_url, headers=dict(headers), data=body, files=files)
            else:
                my_url = u'{0}{1}'.format('https://' + swagger_parser.host, url)
                response = get_method_from_action(app_client, action)(
                    my_url, headers=dict(headers), data=body, files=files)

            logger.info(u'Got status code: {0}'.format(response.status_code))
            print(u'Got status code: {0}'.format(response.status_code))

            # Check whether the status code is whitelisted in authorize_error.
            if (action in authorize_error and path in authorize_error[action]
                    and response.status_code in authorize_error[action][path]):
                logger.info(u'Got authorized error on {0} with status {1}'.format(url, response.status_code))
                print(u'Got authorized error on {0} with status {1}'.format(url, response.status_code))
                yield (action, operation)
                continue

            if response.status_code != 404:
                # Get the expected request body from the spec.
                body_req = swagger_parser.get_send_request_correct_body(path, action)
                try:
                    response_spec = swagger_parser.get_request_data(path, action, body_req)
                except (TypeError, ValueError) as exc:
                    logger.warning(u'Error in the swagger file: {0}'.format(repr(exc)))
                    print(u'Error in the swagger file: {0}'.format(repr(exc)) + '\n')
                    continue

                # Extract the response payload (requests: .content, flask: .data).
                if hasattr(response, 'content'):
                    response_text = response.content
                else:
                    response_text = response.data

                # Decode and parse as JSON, falling back to the raw text.
                if hasattr(response_text, 'decode'):
                    response_text = response_text.decode('utf-8')
                try:
                    response_json = json.loads(response_text)
                except ValueError:
                    response_json = response_text

                # BUG FIX: initialise so the 200-report below cannot hit an
                # undefined local when the spec declares neither this status
                # code nor a 'default' response.
                inconsistency = None
                # BUG FIX: `is 200` / `is 405` were identity comparisons with
                # int literals (implementation-dependent); use equality.
                if response.status_code in response_spec.keys():
                    inconsistency = validate_ins_definition(
                        swagger_parser, response_spec[response.status_code], response_json)
                elif 'default' in response_spec.keys():
                    inconsistency = validate_ins_definition(
                        swagger_parser, response_spec['default'], response_json)
                elif response.status_code == 200:
                    # A bare 200 is treated as success even when the spec
                    # declares no 200 response.
                    logger.info('Got status code 200, but undefined in spec.')
                    print('Got status code 200, but undefined in spec.\n')
                elif response.status_code == 405:
                    logger.info('Got status code 405. Method Not Allowed')
                else:
                    logger.info(u'Got status code:{0},parameters error or authorization error.'.format(
                        response.status_code))

                # Record the outcome in the excel report.
                if response.status_code == 200:
                    test_no += 1
                    if inconsistency:
                        excel_dataset.append([test_no, path, action, response.status_code, 'Yes', '-'])
                    else:
                        excel_dataset.append([test_no, path, action, response.status_code, 'No', '-'])
                else:
                    test_no += 1
                    excel_dataset.append([test_no, path, action, response.status_code, '-', response.content])

                if wait_between_test:  # Wait
                    time.sleep(2)
                yield (action, operation)
            else:
                # 404: retry the operation once later.
                if {'action': action, 'operation': operation} in postponed:
                    # Already retried once => record the failure.
                    logger.info(u'Path {0} has been modified or removed!'.format(path))
                    test_no += 1
                    excel_dataset.append([test_no, path, action, response.status_code, '-', response.reason])
                    postponed.remove({'action': action, 'operation': operation})
                else:
                    # Push the untried operation to the back of the queue...
                    operation_sorted[action].append(operation)
                    # ...and mark it as postponed.
                    postponed.append({'action': action, 'operation': operation})
                yield (action, operation)
                continue

    excel_dataset.title = 'test_result'
    # Export the collected results to the excel report.
    with open('./output/test_excel.xlsx', 'wb') as excel_file:
        excel_file.write(excel_dataset.xlsx)