def test_lang_c(gen_paths: Any, unique_name_evaluator: Any, implicit: bool, use_standard_types: bool,
                configurable_language_context_factory: Callable) -> None:
    """
    Generates and verifies JSON with values filtered using the c language support module.
    """
    lctx = LanguageContext()
    generated_values = ptest_lang_c(gen_paths, implicit, unique_name_evaluator, use_standard_types,
                                    configurable_language_context_factory)
    if implicit:
        lang_any = generated_values["tests"]["lang_any"]
        # Expected results of the C identifier filters: reserved identifiers are
        # underscore-prefixed and illegal characters are escaped as zXnnnn sequences.
        assert lang_any['id_0'] == 'zX003123_class__for_u2___zX0028zX002Aother_stuffzX002DzX0026zX002DsuchzX0029'
        assert lang_any['id_1'] == '_reserved'
        assert lang_any['id_2'] == '_also_reserved'
        assert lang_any['id_3'] == '_register'
        assert lang_any['id_4'] == 'False'
        assert lang_any['id_5'] == '_return'
        assert lang_any['id_7'] == 'I_zX2764_UAVCAN'
        assert lang_any['id_8'] == 'zX0031_zX2764_UAVCAN'
        assert lang_any['id_9'] == 'str'
        assert lang_any['id_A'] == '_strr'
        assert lang_any['id_B'] == '_uINT_FOO_MIN'
        assert lang_any['id_C'] == '_iNT_C'
        assert lang_any['id_D'] == 'LC_Is_reserved'
        assert lang_any['id_E'] == 'NOT_ATOMIC_YO'
        assert lang_any['id_F'] == '_aTOMIC_YO'
        assert lang_any['id_G'] == '_memory_order_yo'
    # The id filter applied directly (outside of a template) must behave the same way.
    assert '_flight__time' == c_filter_id(lctx.get_language('nunavut.lang.c'), Dummy('_Flight__time'))
def test_python_filter_imports_for_array_type(gen_paths, stropping, sort):  # type: ignore
    """
    Verify nunavut.lang.py.filter_imports for a type that depends on types from
    other namespaces, with and without stropping and sorting.
    """
    lctx = LanguageContext()
    lctx.config.set('nunavut.lang.py', 'enable_stropping', str(stropping))
    uavcan_dir = str(gen_paths.dsdl_dir / pathlib.Path('uavcan'))
    type_map = read_namespace(str(gen_paths.dsdl_dir / pathlib.Path('new')), [uavcan_dir])
    assert len(type_map) == 2
    from nunavut.lang.py import filter_imports
    test_subject = next(
        filter(lambda type: (type.short_name == 'hotness'), type_map))
    imports = filter_imports(lctx.get_language('nunavut.lang.py'), test_subject, sort=sort)
    assert len(imports) == 3
    assert 'new' == imports[0]
    if stropping:
        # 'str' collides with the Python builtin so stropping renames the namespace.
        assert 'uavcan.str_' == imports[1]
    else:
        assert 'uavcan.str' == imports[1]
    assert 'uavcan.time' == imports[2]
def test_lang_cpp(gen_paths):  # type: ignore
    """
    Generates and verifies JSON with values filtered using the cpp language module.
    """
    lctx = LanguageContext()
    generated_values = ptest_lang_cpp(gen_paths, True)
    lang_any = generated_values["tests"]["lang_any"]
    # Expected results of the C++ identifier filters: reserved identifiers are
    # underscore-prefixed and illegal characters are escaped as zXnnnn sequences.
    assert lang_any['id_0'] == '_123_class_for_u2_zX0028zX002Aother_stuffzX002DzX0026zX002DsuchzX0029'
    assert lang_any['id_1'] == '_reserved'
    assert lang_any['id_2'] == 'zX005FzX005Falso_reserved'
    assert lang_any['id_3'] == '_register'
    assert lang_any['id_4'] == 'False'
    assert lang_any['id_5'] == '_return'
    assert lang_any['id_7'] == 'I_zX2764_UAVCAN'
    assert lang_any['id_8'] == '_1_zX2764_UAVCAN'
    assert lang_any['id_9'] == 'str'
    assert lang_any['id_A'] == 'strr'
    assert lang_any['id_B'] == '_uINT_FOO_MIN'
    assert lang_any['id_C'] == '_iNT_C'
    assert lang_any['id_D'] == 'LC_Is_reserved'
    assert lang_any['id_E'] == 'NOT_ATOMIC_YO'
    assert lang_any['id_F'] == '_aTOMIC_YO'
    # The id filter applied directly (outside of a template) must behave the same way.
    lang_cpp = lctx.get_language('nunavut.lang.cpp')
    assert '_flight_time' == cpp_filter_id(lang_cpp, Dummy('_Flight_time'))
def test_lang_py_implicit(gen_paths, unique_name_evaluator):  # type: ignore
    """
    Generates and verifies JSON with values filtered using the python language support module.
    """
    lctx = LanguageContext()
    generated_values = ptest_lang_py(gen_paths, True, unique_name_evaluator)
    lang_any = generated_values["tests"]["lang_any"]
    # Expected results of the Python identifier filters: keywords gain a trailing
    # underscore and illegal characters are escaped as zXnnnn sequences.
    assert lang_any['id_0'] == 'zX003123_class__for_u2___zX0028zX002Aother_stuffzX002DzX0026zX002DsuchzX0029'
    assert lang_any['id_1'] == '_Reserved'
    assert lang_any['id_2'] == '__also_reserved'
    assert lang_any['id_3'] == 'register'
    assert lang_any['id_4'] == 'False_'
    assert lang_any['id_5'] == 'return_'
    assert lang_any['id_7'] == 'I_zX2764_UAVCAN'
    assert lang_any['id_8'] == 'zX0031_zX2764_UAVCAN'
    assert lang_any['id_9'] == 'str_'
    assert lang_any['id_A'] == 'strr'
    assert lang_any['id_B'] == 'UINT_FOO_MIN'
    assert lang_any['id_C'] == 'INT_C'
    assert lang_any['id_D'] == 'LC_Is_reserved'
    assert lang_any['id_E'] == 'NOT_ATOMIC_YO'
    assert lang_any['id_F'] == 'ATOMIC_YO'
    # The id filter applied directly (outside of a template) must behave the same way.
    assert '_Flight__time' == py_filter_id(lctx.get_language('nunavut.lang.py'), Dummy('_Flight__time'))
def test_config_overrides(gen_paths):  # type: ignore
    """
    Test providing different configuration values to a LanguageContext object.
    """
    # The project's tox.ini doubles as an additional configuration source here.
    additional_config_files = [gen_paths.root_dir / Path('tox').with_suffix('.ini')]
    lctx = LanguageContext(additional_config_files=additional_config_files)
    # Values from the extra file must override/extend the built-in C configuration.
    assert '.hc' == lctx.get_language('c').extension
    assert 'This is a test' == lctx.get_language('c').get_config_value('option_not_in_properties')
def test_either_target_or_extension() -> None:
    """
    LanguageContext requires either a target or an extension or both but not neither.
    """
    # Any combination of target language and extension is acceptable...
    _ = LanguageContext(target_language='py')
    _ = LanguageContext(extension='.py')
    _ = LanguageContext(target_language='py', extension='.py')
    # ...but providing neither is an error...
    with pytest.raises(ValueError):
        _ = LanguageContext()
    # ...as is naming an unknown target language.
    with pytest.raises(KeyError):
        _ = LanguageContext('foobar')
def _assert_language_config_value(target_language: typing.Union[typing.Optional[str], LanguageContext],
                                  key: str,
                                  expected_value: typing.Any,
                                  message: typing.Optional[str]) -> None:
    """
    Assert that a configuration value of the resolved target language matches an expectation.

    :param target_language: A ready-made LanguageContext, or a target-language name
        (possibly None) from which one is constructed.
    :param key: The configuration key to look up on the target language.
    :param expected_value: The value the lookup must return.
    :param message: Text of the AssertionError raised on mismatch.
    :raises AssertionError: If no target language can be resolved or the value differs.
    """
    if isinstance(target_language, LanguageContext):
        lctx = target_language
    else:
        lctx = LanguageContext(target_language)
    language = lctx.get_target_language()
    if language is None:
        raise AssertionError('Unable to determine target language from provided arguments.')
    if expected_value != language.get_config_value(key):
        raise AssertionError(message)
def test_python_filter_imports(gen_paths):  # type: ignore
    """
    Verify nunavut.lang.py.filter_imports for a type with a single composite dependency.
    """
    lctx = LanguageContext()
    type_map = read_namespace(str(gen_paths.dsdl_dir / pathlib.Path('uavcan')), [])
    from nunavut.lang.py import filter_imports
    test_subject = next(
        filter(lambda type: (type.short_name == 'bar'), type_map))
    imports = filter_imports(lctx.get_language('nunavut.lang.py'), test_subject)
    assert len(imports) == 1
    assert 'uavcan.time' == imports[0]
def test_filter_full_reference_name(gen_paths, language_name, stropping, namespace_separator):
    """
    Cover issue #153
    """
    lctx = LanguageContext()
    ln_package_name = 'nunavut.lang.{}'.format(language_name)
    lctx.config.set(ln_package_name, 'enable_stropping', str(stropping))
    ln = lctx.get_language(ln_package_name)

    import importlib
    from pydsdl import ServiceType, StructureType, Version

    # Build a synthetic service whose top-level namespace 'register' is a reserved
    # word, so stropping behaviour is observable in the generated reference names.
    test_subject_module = importlib.import_module(ln_package_name)
    service_request_type = StructureType(name='register.getting.tired.of.Python',
                                         version=Version(0, 1),
                                         attributes=[],
                                         deprecated=False,
                                         fixed_port_id=None,
                                         source_file_path=Path(),
                                         has_parent_service=True)
    service_response_type = StructureType(name='register.getting.tired.of.Python',
                                          version=Version(0, 1),
                                          attributes=[],
                                          deprecated=False,
                                          fixed_port_id=None,
                                          source_file_path=Path(),
                                          has_parent_service=True)
    service_type = ServiceType(service_request_type, service_response_type, None)

    # C++ is special because namespaces are part of the language and therefore each namespace
    # name must be stropped
    top_level_name = ('_register' if stropping and language_name == 'cpp' else 'register')
    assert test_subject_module.filter_full_reference_name(
        ln, service_type) == '{}.getting.tired.of_0_1'.format(
            top_level_name).replace('.', namespace_separator)
    assert test_subject_module.filter_full_reference_name(
        ln, service_request_type) == '{}.getting.tired.of.Python_0_1'.format(top_level_name)\
        .replace('.', namespace_separator)
    assert test_subject_module.filter_full_reference_name(
        ln, service_response_type) == '{}.getting.tired.of.Python_0_1'.format(top_level_name)\
        .replace('.', namespace_separator)
def test_python_filter_short_reference_name(gen_paths):  # type: ignore
    """
    Verify nunavut.lang.py.filter_short_reference_name appends the version suffix.
    """
    lctx = LanguageContext()
    type_map = read_namespace(str(gen_paths.dsdl_dir / pathlib.Path('uavcan')), [])
    from nunavut.lang.py import filter_short_reference_name
    test_subject = next(
        filter(lambda type: (type.short_name == 'SynchronizedTimestamp'), type_map))
    full_reference_name = filter_short_reference_name(
        lctx.get_language('nunavut.lang.py'), test_subject)
    assert "SynchronizedTimestamp_1_0" == full_reference_name
def _make_filter_test_template(filter_or_list: typing.Union[typing.Callable, typing.List[typing.Callable]],
                               body: str,
                               expected: str,
                               target_language_or_language_context: typing.Union[typing.Optional[str],
                                                                                 LanguageContext],
                               **globals: typing.Optional[typing.Dict[str, typing.Any]]) -> str:
    """
    Render ``body`` as a Jinja template with the given filter(s) registered and
    compare the result against ``expected``.

    :param filter_or_list: A single filter callable or a list of them. Each filter is
        registered under its ``__name__`` with the leading ``filter_`` prefix stripped.
    :param body: The template text to render.
    :param expected: The exact output the rendered template must produce.
    :param target_language_or_language_context: Either a ready-made LanguageContext or
        a target-language name used to construct one.
    :param globals: Extra values exposed as template globals.
    :return: The rendered template text.
    :raises AssertionError: If the rendered output differs from ``expected``.
    """
    from nunavut.jinja import CodeGenEnvironment
    e = CodeGenEnvironment(loader=DictLoader({'test': body}))
    # **globals is always a dict (possibly empty) so it can be applied unconditionally.
    e.globals.update(globals)

    if isinstance(target_language_or_language_context, LanguageContext):
        lctx = target_language_or_language_context
    else:
        lctx = LanguageContext(target_language_or_language_context)

    filters = (filter_or_list if isinstance(filter_or_list, list) else [filter_or_list])
    for fltr in filters:
        filter_name = fltr.__name__[7:]  # strip the 'filter_' prefix
        # Bind the first matching decoration only. The previous if/if/if chain let the
        # later `else` clauses clobber an earlier partial binding (e.g. an environment
        # filter without a language attribute ended up registered as a bare callable),
        # so a single if/elif chain is used instead.
        if hasattr(fltr, ENVIRONMENT_FILTER_ATTRIBUTE_NAME) and \
                getattr(fltr, ENVIRONMENT_FILTER_ATTRIBUTE_NAME):
            e.filters[filter_name] = functools.partial(fltr, e)
        elif hasattr(fltr, CONTEXT_FILTER_ATTRIBUTE_NAME) and getattr(fltr, CONTEXT_FILTER_ATTRIBUTE_NAME):
            context = MagicMock()
            e.filters[filter_name] = functools.partial(fltr, context)
        elif hasattr(fltr, LANGUAGE_FILTER_ATTRIBUTE_NAME):
            language_name = getattr(fltr, LANGUAGE_FILTER_ATTRIBUTE_NAME)
            e.filters[filter_name] = functools.partial(fltr, lctx.get_language(language_name))
        else:
            e.filters[filter_name] = fltr

    target_language_resolved = lctx.get_target_language()
    if target_language_resolved is not None:
        e.globals.update(target_language_resolved.get_globals())
        e.globals['options'].update(target_language_resolved.get_options())

    rendered = str(e.get_template('test').render())
    if expected != rendered:
        msg = 'Unexpected template output\n\texpected : {}\n\twas : {}'.format(
            expected.replace('\n', '\\n'), rendered.replace('\n', '\\n'))
        raise AssertionError(msg)
    return rendered
def _make_configurable_language_context(config_overrides: typing.Mapping[str, typing.Mapping[str, typing.Any]],
                                        target_language: typing.Optional[str] = None,
                                        extension: typing.Optional[str] = None,
                                        namespace_output_stem: typing.Optional[str] = None,
                                        omit_serialization_support_for_target: bool = True) \
        -> LanguageContext:
    """
    Build a LanguageContext whose configuration is overridden via a temporary ini-style file.

    :param config_overrides: Mapping of section name to key/value overrides. Nested
        mappings are serialized as indented multi-line values.
    :param target_language: Optional target language for the context.
    :param extension: Optional output extension for the context.
    :param namespace_output_stem: NOTE(review): not used in this body — confirm whether it
        should be forwarded to the LanguageContext constructor.
    :param omit_serialization_support_for_target: NOTE(review): also unused here — confirm.
    :return: The configured LanguageContext.
    """
    from tempfile import NamedTemporaryFile
    config_bytes = []  # type: typing.List[bytearray]

    def _config_gen(indent: int,
                    key: str,
                    value: typing.Union[typing.Dict, typing.Any],
                    out_config_bytes: typing.List[bytearray]) \
            -> None:
        # Recursively serialize one key. Dict values become indented sub-keys;
        # anything else is str()-formatted on a single 'key = value' line.
        line = bytearray('{}{} = '.format(' ' * indent, key), 'utf8')
        if isinstance(value, dict):
            line += bytearray('\n', 'utf8')
            out_config_bytes.append(line)
            for subkey, subvalue in value.items():
                _config_gen(indent + 1, subkey, subvalue, out_config_bytes)
        else:
            line += bytearray('{}\n'.format(str(value)), 'utf8')
            out_config_bytes.append(line)

    for section, config in config_overrides.items():
        config_bytes.append(bytearray('[{}]\n'.format(section), 'utf8'))
        for key, value in config.items():
            _config_gen(0, key, value, config_bytes)

    # The temporary file only needs to outlive the LanguageContext constructor,
    # which reads it eagerly, so constructing inside the with-block is safe.
    with NamedTemporaryFile() as config_override_file:
        config_override_file.writelines(config_bytes)
        config_override_file.flush()
        return LanguageContext(target_language,
                               extension,
                               additional_config_files=[pathlib.Path(config_override_file.name)])
def test_three_roots(gen_paths):  # type: ignore
    """
    Generates a type that uses another type from a different root namespace.
    """
    root_namespace = str(gen_paths.dsdl_dir / Path("scotec"))
    includes = [
        str(gen_paths.dsdl_dir / Path("huckco")),
        str(gen_paths.dsdl_dir / Path("esmeinc"))
    ]
    compound_types = read_namespace(root_namespace, includes, allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(compound_types, root_namespace, gen_paths.out_dir, language_context)
    generator = DSDLCodeGenerator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("scotec.FatherType", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert len(json_blob) > 0
    # FatherType references one type from each of the two other root namespaces.
    assert len(json_blob['scotec.FatherType']['attributes']) == 2
    assert json_blob['scotec.FatherType']['attributes'][0][
        'type'] == 'huckco.SonType.0.1'
    assert json_blob['scotec.FatherType']['attributes'][1][
        'type'] == 'esmeinc.DaughterType.0.1'
def test_namespace_stropping(gen_paths, language_key, expected_file_ext, expected_stropp_part_0,
                             expected_stropp_part_1):
    """Test generating a namespace that uses a reserved keyword for a given language."""
    language_context = LanguageContext(language_key)
    namespace, root_namespace_path, compound_types = gen_test_namespace(
        gen_paths, language_context)
    assert len(compound_types) == 2
    generator = Generator(namespace, True, language_context, gen_paths.templates_dir / Path('default'))
    generator.generate_all()

    expected_stropped_ns = 'scotec.{}.{}'.format(expected_stropp_part_0, expected_stropp_part_1)
    outfile = gen_paths.find_outfile_in_namespace(expected_stropped_ns, namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None

    # The on-disk output path must reflect the stropped namespace directory names.
    output_path_for_stropped = namespace.find_output_path_for_type(
        compound_types[1])
    expected_stable_path = gen_paths.out_dir / 'scotec'
    expected_path_and_file = expected_stable_path / expected_stropp_part_0 / expected_stropp_part_1 / 'ATOMIC_TYPE_0_1'
    assert expected_path_and_file.with_suffix(
        expected_file_ext) == output_path_for_stropped
def test_instance_tests(gen_paths):  # type: ignore
    """
    Verifies that instance tests are added for pydsdl.SerializableType and all of its subclasses.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("buncho")
    type_map = read_namespace(str(root_namespace_dir), [])
    language_context = LanguageContext('js')
    namespace = build_namespace_tree(type_map, root_namespace_dir, gen_paths.out_dir, language_context)
    generator = DSDLCodeGenerator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    outfile = gen_paths.find_outfile_in_namespace("buncho.serializables", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    # Each field must report the correct results for every instance test applied to it.
    assert json_blob["this_field_is_an_int32"]["isSerializableType"] is True
    assert json_blob["this_field_is_an_int32"]["isIntegerType"] is True
    assert json_blob["this_field_is_an_int32"]["isFloatType"] is False
    assert json_blob["this_field_is_an_int32"]["isIntegerType_field"] is True
    assert json_blob["this_field_is_a_float"]["isSerializableType"] is True
    assert json_blob["this_field_is_a_float"]["isIntegerType"] is False
    assert json_blob["this_field_is_a_float"]["isIntegerType_field"] is False
    assert json_blob["this_field_is_a_float"]["isFloatType"] is True
def test_line_pp(gen_paths):  # type: ignore
    """
    Exercises the LinePostProcessor type.
    """
    class TestLinePostProcessor0(nunavut.postprocessors.LinePostProcessor):
        # Drops the line ending of empty lines; passes everything else through.
        def __call__(
                self, line_and_lineend: typing.Tuple[str, str]) -> typing.Tuple[str, str]:
            if len(line_and_lineend[0]) == 0:
                return ('', '')
            else:
                return line_and_lineend

    class TestLinePostProcessor1(nunavut.postprocessors.LinePostProcessor):
        # Records every line it sees without modifying it.
        def __init__(self):  # type: ignore
            self._lines = []  # type: typing.List[str]

        def __call__(
                self, line_and_lineend: typing.Tuple[str, str]) -> typing.Tuple[str, str]:
            self._lines.append(line_and_lineend[0])
            return line_and_lineend

    line_pp0 = TestLinePostProcessor0()
    line_pp1 = TestLinePostProcessor1()
    namespace = _test_common_namespace(gen_paths)
    generator = nunavut.jinja.Generator(namespace, False, LanguageContext(extension='.json'),
                                        gen_paths.templates_dir)
    generator.generate_all(False, True, [line_pp0, line_pp1])
    # The recording post-processor must have been invoked at least once.
    assert len(line_pp1._lines) > 0
    _test_common_post_condition(gen_paths, namespace)
def test_namespace_generation(gen_paths):  # type: ignore
    """Test actually generating a namespace file."""
    language_context = LanguageContext(extension='.json', namespace_output_stem='__module__')
    namespace, root_namespace_path, compound_types = gen_test_namespace(
        gen_paths, language_context)
    assert len(compound_types) == 2
    generator = Generator(namespace, True, language_context, gen_paths.templates_dir / Path('default'))
    generator.generate_all()
    for nested_namespace in namespace.get_nested_namespaces():
        # Each nested namespace maps onto a sub-directory under the root namespace path.
        nested_namespace_path = Path(root_namespace_path) / Path(
            *nested_namespace.full_name.split('.')[1:])
        # NOTE(review): compares against str here while other tests compare Path objects
        # directly — confirm which form source_file_path is expected to be.
        assert nested_namespace.source_file_path == str(nested_namespace_path)
    outfile = gen_paths.find_outfile_in_namespace("scotec.mcu", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    assert json_blob['scotec.mcu']['namespace'] == 'scotec.mcu'

    output_path_for_timer = namespace.find_output_path_for_type(
        compound_types[0])
    assert (gen_paths.out_dir / 'scotec' / 'mcu' / 'Timer_0_1').with_suffix('.json') == output_path_for_timer
def test_custom_filter_and_test(gen_paths):  # type: ignore
    """
    Verify that user-supplied additional filters and tests are available to templates.
    """
    root_path = str(gen_paths.dsdl_dir / Path("uavcan"))
    output_path = gen_paths.out_dir / 'filter_and_test'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('custom_filter_and_test')
    generator = DSDLCodeGenerator(
        namespace,
        templates_dir=template_path,
        additional_filters={'custom_filter': lambda T: 'hi mum'},
        additional_tests={'custom_test': lambda T: True})
    generator.generate_all()
    outfile = gen_paths.find_outfile_in_namespace(
        "uavcan.time.SynchronizedTimestamp", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    # The template applied the custom filter and custom test to produce these values.
    assert json_blob['filter_result'] == 'hi mum'
    assert json_blob['test_result'] == 'yes'
def test_get_templates(gen_paths):  # type: ignore
    """
    Verifies the nunavut.jinja.Generator.get_templates() method.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("uavcan")
    root_namespace = str(root_namespace_dir)
    serializable_types = read_namespace(root_namespace, [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(serializable_types, root_namespace_dir, gen_paths.out_dir, language_context)
    generator = Generator(namespace, templates_dir=gen_paths.templates_dir)

    templates = generator.get_templates()

    count = 0
    for template in templates:
        count += 1
    assert count > 0

    # Do it twice just to cover in-memory cache
    templates = generator.get_templates()

    count = 0
    for template in templates:
        count += 1
    assert count > 0
def test_filter_full_reference_name_via_template(gen_paths, language_name, namespace_separator):
    """
    Render the reference-name filters through a template for a service type and verify
    the generated JSON for the given target language.
    """
    root_path = str(gen_paths.dsdl_dir / Path("uavcan"))
    output_path = gen_paths.out_dir / 'filter_and_test'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(target_language=language_name)
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('full_reference_test')
    generator = DSDLCodeGenerator(namespace, templates_dir=template_path)
    generator.generate_all()
    outfile = gen_paths.find_outfile_in_namespace("uavcan.str.bar_svc", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    assert json_blob['parent'][
        'full_reference_name'] == 'uavcan.str.bar_svc_1_0'.replace(
            '.', namespace_separator)
    # BUG FIX: the conditional expression must be parenthesized. Without parentheses
    # `a == 'bar_svc' if cond else 'bar_svc_1_0'` evaluates to the truthy string
    # 'bar_svc_1_0' for non-cpp languages, so the assertion always passed.
    assert json_blob['parent'][
        'short_reference_name'] == ('bar_svc' if language_name == 'cpp' else 'bar_svc_1_0')
    assert json_blob['request'][
        'full_reference_name'] == 'uavcan.str.bar_svc.Request_1_0'.replace(
            '.', namespace_separator)
    assert json_blob['request']['short_reference_name'] == 'Request_1_0'
    assert json_blob['response'][
        'full_reference_name'] == 'uavcan.str.bar_svc.Response_1_0'.replace(
            '.', namespace_separator)
    assert json_blob['response']['short_reference_name'] == 'Response_1_0'
def _test_common_namespace(gen_paths, target_language: str = 'js', extension: str = '.json'):  # type: ignore
    """
    Build and return the namespace tree for the shared 'uavcan' test fixture.
    """
    root_namespace_dir = gen_paths.dsdl_dir / pathlib.Path("uavcan")
    root_namespace = str(root_namespace_dir)
    return nunavut.build_namespace_tree(pydsdl.read_namespace(root_namespace, []),
                                        root_namespace_dir,
                                        gen_paths.out_dir,
                                        LanguageContext(target_language, extension=extension))
def ptest_lang_cpp(gen_paths, implicit):  # type: ignore
    """Generates and verifies JSON with values filtered using the cpp language module.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("langtest")
    root_namespace = str(root_namespace_dir)
    compound_types = read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    if implicit:
        templates_dirs = [gen_paths.templates_dir / Path("implicit") / Path("cpp")]
    else:
        templates_dirs = [gen_paths.templates_dir / Path("explicit")]
    templates_dirs.append(gen_paths.templates_dir / Path("common"))
    # Implicit mode selects behaviour by target language; explicit mode by extension.
    language_context = LanguageContext('cpp' if implicit else None, '.hpp' if not implicit else None)
    namespace = build_namespace_tree(compound_types, root_namespace_dir, gen_paths.out_dir, language_context)
    generator = Generator(namespace, templates_dir=templates_dirs)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("langtest.cpp.ns.TestType", namespace)

    assert (outfile is not None)

    generated_values = {}  # type: Dict
    with open(str(outfile), 'r') as python_file:
        # The generated output is itself Python source; execute it to capture the values.
        exec(python_file.read(), generated_values)

    assert len(generated_values)

    lang_cpp_output = generated_values["tests"]["lang_cpp"]
    assert lang_cpp_output["namespace"] == "langtest.cpp.ns"
    # NOTE(review): the multi-line layouts below were reconstructed from a
    # whitespace-collapsed source — confirm against the cpp namespace filters.
    assert lang_cpp_output["namespace_open"] == r'''namespace langtest
{
namespace cpp
{
namespace ns
{
'''
    assert lang_cpp_output["namespace_open_wo_nl"] == r'''namespace langtest {
namespace cpp {
namespace ns {
'''
    assert lang_cpp_output["namespace_close"] == r'''}
}
}
'''
    assert lang_cpp_output["namespace_close_w_comments"] == r'''} // namespace ns
} // namespace cpp
} // namespace langtest
'''
    return generated_values
def test_python35_resolve_behavior(gen_paths):  # type: ignore
    """Make sure Python3.5 and Python 3.6 throw the same exception here."""
    language_context = LanguageContext('c')
    # Constructing a Namespace over this tree is expected to raise FileNotFoundError
    # identically across the supported Python versions.
    with pytest.raises(FileNotFoundError):
        Namespace('foo.bar', gen_paths.dsdl_dir / Path("scotec"), gen_paths.out_dir, language_context)
def test_get_all_types(gen_paths):  # type: ignore
    """Verify the get_all_types method in Namespace"""
    namespace, _, _ = gen_test_namespace(gen_paths, LanguageContext(extension='.json'))
    index = dict()
    for ns, path in namespace.get_all_types():
        index[path] = ns
    # The generated fixture contains exactly six distinct output paths.
    assert len(index) == 6
def test_namespace_eq(gen_paths):  # type: ignore
    """Verify Namespace equality and inequality comparisons."""
    language_context = LanguageContext(extension='.json')
    namespace0, _, _ = gen_test_namespace(gen_paths, language_context)
    namespace1 = Namespace('', gen_paths.dsdl_dir, gen_paths.out_dir, language_context)
    assert namespace0 == namespace0
    assert namespace1 == namespace1
    assert namespace0 != namespace1
    # Comparison with a non-Namespace value must report inequality, not raise.
    assert "foo" != namespace0
def test_empty_pp_array(gen_paths):  # type: ignore
    """
    Verifies the behavior of a zero length post_processors argument.
    """
    namespace = _test_common_namespace(gen_paths)
    generator = nunavut.jinja.Generator(namespace, False, LanguageContext(extension='.json'),
                                        gen_paths.templates_dir)
    # An empty post-processor list must generate successfully, same as providing none.
    generator.generate_all(False, True, [])
    _test_common_post_condition(gen_paths, namespace)
def test_python_filter_includes(gen_paths, stropping, sort):  # type: ignore
    """
    Verify nunavut.lang.cpp.filter_includes for a type with array and composite
    fields, with and without stropping and sorting.
    """
    lctx = LanguageContext(target_language='cpp', extension='.h')
    lctx.config.set('nunavut.lang.cpp', 'enable_stropping', str(stropping))
    uavcan_dir = (gen_paths.dsdl_dir / pathlib.Path('uavcan')).as_posix()
    type_map = read_namespace(
        (gen_paths.dsdl_dir / pathlib.Path('new')).as_posix(), [uavcan_dir])
    from nunavut.lang.cpp import filter_includes
    test_subject = next(
        filter(lambda type: (type.short_name == 'hotness'), type_map))
    imports = filter_includes(lctx.get_language('nunavut.lang.cpp'), test_subject, sort=sort)
    assert len(imports) == 5

    # 'new' is a C++ keyword, so stropping renames that namespace directory to '_new'.
    expected = [
        '"_new/malloc_1_0.h"' if stropping else '"new/malloc_1_0.h"',
        '"uavcan/str/bar_1_0.h"',
        '"uavcan/time/SynchronizedTimestamp_1_0.h"',
        '<array>',
        '<cstdint>'
    ]
    if sort:
        # Sorted output must match exactly, element for element.
        assert expected == imports
    else:
        # Order is unspecified when not sorting: check membership only.
        # BUG FIX: the original used map() over an assert helper; map is lazy and was
        # never consumed, so these assertions never actually executed.
        for path in expected:
            assert path in imports
def parameterized_test_namespace_(gen_paths, templates_subdir):  # type: ignore
    """
    Generate code with namespace types disabled and verify namespace name/path resolution.
    """
    language_context = LanguageContext(extension='.json')
    namespace, root_namespace_path, _ = gen_test_namespace(gen_paths, language_context)
    generator = DSDLCodeGenerator(namespace,
                                  generate_namespace_types=YesNoDefault.NO,
                                  templates_dir=gen_paths.templates_dir / Path(templates_subdir))
    generator.generate_all()
    assert namespace.source_file_path == Path(root_namespace_path)
    assert namespace.full_name == 'scotec'
    for nested_namespace in namespace.get_nested_namespaces():
        # Each nested namespace maps onto a sub-directory under the root namespace path.
        nested_namespace_path = Path(root_namespace_path) / Path(*nested_namespace.full_name.split('.')[1:])
        assert nested_namespace.source_file_path == nested_namespace_path
def test_trim_trailing_ws(gen_paths):  # type: ignore
    """
    Verify the TrimTrailingWhitespace post-processor removes trailing spaces from output.
    """
    namespace = _test_common_namespace(gen_paths)
    generator = nunavut.jinja.Generator(namespace, False, LanguageContext(extension='.json'),
                                        gen_paths.templates_dir)
    generator.generate_all(False, True, [nunavut.postprocessors.TrimTrailingWhitespace()])
    outfile = _test_common_post_condition(gen_paths, namespace)
    with open(str(outfile), 'r') as json_file:
        for line in json_file:
            # No generated line may end with one or more spaces.
            assert re.search(r' +$', line) is None
def ptest_lang_py(gen_paths, implicit, unique_name_evaluator):  # type: ignore
    """
    Generates and verifies JSON with values filtered using the python language support module.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("langtest")
    root_namespace = str(root_namespace_dir)
    if implicit:
        templates_dirs = [
            gen_paths.templates_dir / Path("implicit") / Path("py")
        ]
    else:
        templates_dirs = [gen_paths.templates_dir / Path("explicit")]
    templates_dirs.append(gen_paths.templates_dir / Path("common"))
    compound_types = read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    # Implicit mode selects behaviour by target language; explicit mode by extension.
    language_context = LanguageContext('py' if implicit else None, '.py' if not implicit else None)
    namespace = build_namespace_tree(compound_types, root_namespace_dir, gen_paths.out_dir, language_context)
    generator = DSDLCodeGenerator(namespace,
                                  generate_namespace_types=YesNoDefault.NO,
                                  templates_dir=templates_dirs)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("langtest.py.TestType", namespace)

    assert (outfile is not None)

    generated_values = {}  # type: Dict
    with open(str(outfile), 'r') as python_file:
        # The generated output is itself Python source; execute it to capture the values.
        exec(python_file.read(), generated_values)

    assert len(generated_values) > 0

    lang_py_output = generated_values["tests"]["lang_py"]
    # Unique names must follow the per-case patterns enforced by the fixture evaluator.
    unique_name_evaluator(r'_NAME\d+_', lang_py_output["unique_name_0"])
    unique_name_evaluator(r'_NAME\d+_', lang_py_output["unique_name_1"])
    unique_name_evaluator(r'_name\d+_', lang_py_output["unique_name_2"])
    assert "identifier_zero" == lang_py_output["id_0"]
    many_unique_names = lang_py_output.get("many_unique_names")
    if many_unique_names is not None:
        for name in many_unique_names:
            unique_name_evaluator(r'_f\d+_', name)
    return generated_values