def test_json_errors_parsing():
    """Nose-style generator test: parse every sample error response under
    json/errors/ and compare against the matching file in json/expected/.

    Fix: the operation-name scan now stops at the first match (`break`) —
    only one operation can match, and the original kept scanning the full
    list — and the loop-invariant snake_case conversion is hoisted out of
    the inner loop.
    """
    # The outputs/ directory has sample output responses.
    # For each file in outputs/ there's a corresponding file
    # in expected/ that has the expected parsed response.
    base_dir = os.path.join(os.path.dirname(__file__), 'json')
    json_responses_dir = os.path.join(base_dir, 'errors')
    expected_parsed_dir = os.path.join(base_dir, 'expected')
    session = ibm_botocore.session.get_session()
    for json_response_file in os.listdir(json_responses_dir):
        # Files look like: 'datapipeline-create-pipeline.json'
        service_name, operation_name = os.path.splitext(
            json_response_file)[0].split('-', 1)
        expected_parsed_response = os.path.join(expected_parsed_dir,
                                                json_response_file)
        raw_response_file = os.path.join(json_responses_dir,
                                         json_response_file)
        with open(expected_parsed_response) as f:
            expected = json.load(f)
        service_model = session.get_service_model(service_name)
        # Resolve the filename's operation segment to an operation model.
        # Stays None if nothing matches — TODO confirm downstream check
        # handles that (the original had the same behavior).
        operation_model = None
        wanted = operation_name.replace('-', '_')  # hoisted invariant
        for op_name in service_model.operation_names:
            if xform_name(op_name) == wanted:
                operation_model = service_model.operation_model(op_name)
                break  # only one operation can match; stop scanning
        with open(raw_response_file, 'rb') as f:
            raw_response_body = f.read()
        yield (_test_parsed_response, raw_response_file,
               raw_response_body, operation_model, expected)
def _all_inputs():
    """Yield (input_shape, service_name, operation_name) for every
    operation, across all available services, whose input shape exists
    and has at least one member."""
    session = ibm_botocore.session.get_session()
    for svc in session.get_available_services():
        model = session.get_service_model(svc)
        for op in model.operation_names:
            shape = model.operation_model(op).input_shape
            if shape is not None and shape.members:
                yield shape, svc, op
def _pagination_configs():
    """Yield (operation_name, pagination_config, service_model) for every
    paginator definition available to the data loader."""
    session = ibm_botocore.session.get_session()
    loader = session.get_component('data_loader')
    for service_name in loader.list_available_services('paginators-1'):
        model = session.get_service_model(service_name)
        config = loader.load_service_model(
            service_name, 'paginators-1', model.api_version)
        for operation, op_config in config['pagination'].items():
            yield operation, op_config, model
def test_can_generate_all_inputs():
    """Nose-style generator test: for every operation with a non-empty
    input shape, yield a check that ArgumentGenerator can build a
    skeleton argument for it."""
    session = ibm_botocore.session.get_session()
    generator = ArgumentGenerator()
    for svc in session.get_available_services():
        model = session.get_service_model(svc)
        for op in model.operation_names:
            shape = model.operation_model(op).input_shape
            # Skip operations with no input, or an empty input structure.
            if shape is None or not shape.members:
                continue
            yield (_test_can_generate_skeleton, generator,
                   shape, svc, op)
def test_lint_shared_example_configs():
    """Nose-style generator test: lint every shared example shipped in
    the examples-1 data files, one check per example."""
    session = ibm_botocore.session.Session()
    loader = session.get_component('data_loader')
    for service in loader.list_available_services('examples-1'):
        service_model = session.get_service_model(service)
        config = loader.load_service_model(
            service, 'examples-1', service_model.api_version)
        # Services may ship an examples file with no "examples" key.
        for operation, op_examples in config.get("examples", {}).items():
            for example in op_examples:
                yield _lint_single_example, operation, example, service_model
def test_xml_parsing():
    """Nose-style generator test: parse every sample XML fixture under
    xml/responses/ and xml/errors/ and compare against its expected
    parsed result.

    Fix: `create_session()` was re-created on every iteration of the
    directory loop although it is loop-invariant; it is now hoisted out.
    """
    session = create_session()  # hoisted: identical for both directories
    for dp in ['responses', 'errors']:
        data_path = os.path.join(os.path.dirname(__file__), 'xml', dp)
        # Fixture names look like '<service>-<operation>.xml'; collect the
        # distinct service prefixes first.
        xml_files = glob.glob('%s/*.xml' % data_path)
        service_names = set()
        for fn in xml_files:
            service_names.add(os.path.split(fn)[1].split('-')[0])
        for service_name in service_names:
            service_model = session.get_service_model(service_name)
            service_xml_files = glob.glob('%s/%s-*.xml' % (data_path,
                                                           service_name))
            for xmlfile in service_xml_files:
                expected = _get_expected_parsed_result(xmlfile)
                operation_model = _get_operation_model(service_model,
                                                       xmlfile)
                raw_response_body = _get_raw_response_body(xmlfile)
                yield (_test_parsed_response, xmlfile, raw_response_body,
                       operation_model, expected)
def _uhg_test_json_parsing():
    """Yield parsed-response checks for every JSON fixture under
    json/inputs/, grouped by service.

    Fix: the original also computed a json/outputs path in two statements
    and never used it; the dead locals have been removed.
    """
    input_path = os.path.join(os.path.dirname(__file__), 'json', 'inputs')
    session = ibm_botocore.session.get_session()
    # Fixture names look like '<service>-<operation>.json'; collect the
    # distinct service prefixes first.
    jsonfiles = glob.glob('%s/*.json' % input_path)
    service_names = set()
    for fn in jsonfiles:
        service_names.add(os.path.split(fn)[1].split('-')[0])
    for service_name in service_names:
        service_model = session.get_service_model(service_name)
        service_json_files = glob.glob('%s/%s-*.json' % (input_path,
                                                         service_name))
        for jsonfile in service_json_files:
            expected = _get_expected_parsed_result(jsonfile)
            operation_model = _get_operation_model(service_model, jsonfile)
            with open(jsonfile, 'rb') as f:
                raw_response_body = f.read()
            yield (_test_parsed_response, jsonfile, raw_response_body,
                   operation_model, expected)
def _xml_test_cases():
    """Collect (xmlfile, operation_model, expected) tuples for every XML
    fixture in the responses/ and errors/ directories, sorted (by file
    path, since paths are unique)."""
    session = create_session()
    cases = []
    base = os.path.join(os.path.dirname(__file__), 'xml')
    for subdir in ['responses', 'errors']:
        data_path = os.path.join(base, subdir)
        # Distinct '<service>' prefixes of '<service>-<operation>.xml'.
        names = {os.path.split(fn)[1].split('-')[0]
                 for fn in glob.glob('%s/*.xml' % data_path)}
        for service_name in names:
            service_model = session.get_service_model(service_name)
            matching = glob.glob('%s/%s-*.xml' % (data_path, service_name))
            for xmlfile in matching:
                cases.append(
                    (xmlfile,
                     _get_operation_model(service_model, xmlfile),
                     _get_expected_parsed_result(xmlfile))
                )
    return sorted(cases)