def test_three_roots(gen_paths):  # type: ignore
    """
    Generates a type that uses another type from a different root namespace.
    """
    root_namespace = str(gen_paths.dsdl_dir / Path("scotec"))
    # "scotec" composites reference types living under these sibling roots, so
    # both must be supplied as lookup directories to the parser.
    includes = [str(gen_paths.dsdl_dir / Path("huckco")),
                str(gen_paths.dsdl_dir / Path("esmeinc"))]
    compound_types = read_namespace(root_namespace, includes, allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(compound_types,
                                     root_namespace,
                                     gen_paths.out_dir,
                                     language_context)
    generator = DSDLCodeGenerator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("scotec.FatherType", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert len(json_blob) > 0
    # FatherType carries exactly one attribute from each foreign root namespace.
    assert len(json_blob['scotec.FatherType']['attributes']) == 2
    assert json_blob['scotec.FatherType']['attributes'][0]['type'] == 'huckco.SonType.0.1'
    assert json_blob['scotec.FatherType']['attributes'][1]['type'] == 'esmeinc.DaughterType.0.1'
def _test_common_namespace(gen_paths, target_language: str = 'js', extension: str = '.json'):  # type: ignore
    """Parse the test 'uavcan' DSDL root and build its namespace tree for the given language."""
    namespace_dir = gen_paths.dsdl_dir / pathlib.Path("uavcan")
    parsed_types = pydsdl.read_namespace(str(namespace_dir), [])
    context = LanguageContext(target_language, extension=extension)
    return nunavut.build_namespace_tree(parsed_types,
                                        namespace_dir,
                                        gen_paths.out_dir,
                                        context)
def test_ifuses(gen_paths, configurable_language_context_factory, std, expect_uses_variant):  # type: ignore
    """
    Verifies the ifuses/ifnuses template logic: whether the configured C++
    standard ("std" language option) is reported as providing std::variant.
    (Parametrized via std / expect_uses_variant fixtures.)
    """
    config_overrides = {"nunavut.lang.cpp": {"options": {"std": std}}}
    root_namespace_dir = gen_paths.dsdl_dir / Path("denada")
    type_map = read_namespace(str(root_namespace_dir), [])
    language_context = configurable_language_context_factory(config_overrides, "cpp")
    namespace = build_namespace_tree(type_map, root_namespace_dir, gen_paths.out_dir, language_context)
    generator = DSDLCodeGenerator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    outfile = gen_paths.find_outfile_in_namespace("denada.serializables", namespace)

    assert outfile is not None

    with open(str(outfile), "r") as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    # "never" must be false regardless of configuration; the two variant flags
    # must always be complementary.
    assert not json_blob["never"]
    assert json_blob["ifuses_std_variant"] is expect_uses_variant
    assert json_blob["ifnuses_std_variant"] is not expect_uses_variant
def test_filter_full_reference_name_via_template(gen_paths, language_name, namespace_separator):
    """
    Renders a service type through the full/short reference-name filters and
    verifies the generated names for the service, its Request, and its Response.

    :param language_name: Target language under test (e.g. 'c' or 'cpp').
    :param namespace_separator: The separator the language uses between namespace components.
    """
    root_path = str(gen_paths.dsdl_dir / Path("uavcan"))
    output_path = gen_paths.out_dir / 'filter_and_test'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(target_language=language_name)
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('full_reference_test')
    generator = DSDLCodeGenerator(namespace, templates_dir=template_path)
    generator.generate_all()

    outfile = gen_paths.find_outfile_in_namespace("uavcan.str.bar_svc", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    assert json_blob['parent']['full_reference_name'] == \
        'uavcan.str.bar_svc_1_0'.replace('.', namespace_separator)
    # BUG FIX: the original read "assert x == 'bar_svc' if cond else 'bar_svc_1_0'",
    # which parses as "assert (x == 'bar_svc') if cond else 'bar_svc_1_0'" — the
    # non-cpp branch asserted a truthy string and always passed. Parenthesizing
    # the conditional restores the intended comparison.
    assert json_blob['parent']['short_reference_name'] == \
        ('bar_svc' if language_name == 'cpp' else 'bar_svc_1_0')
    assert json_blob['request']['full_reference_name'] == \
        'uavcan.str.bar_svc.Request_1_0'.replace('.', namespace_separator)
    assert json_blob['request']['short_reference_name'] == 'Request_1_0'
    assert json_blob['response']['full_reference_name'] == \
        'uavcan.str.bar_svc.Response_1_0'.replace('.', namespace_separator)
    assert json_blob['response']['short_reference_name'] == 'Response_1_0'
def test_get_templates(gen_paths):  # type: ignore
    """
    Verifies the nunavut.jinja.Generator.get_templates() method.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("uavcan")
    serializable_types = read_namespace(str(root_namespace_dir), [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(serializable_types,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = Generator(namespace, templates_dir=gen_paths.templates_dir)

    # First enumeration discovers templates on disk...
    assert sum(1 for _ in generator.get_templates()) > 0
    # ...the second exercises the in-memory cache path.
    assert sum(1 for _ in generator.get_templates()) > 0
def test_instance_tests(gen_paths):  # type: ignore
    """
    Verifies that instance tests are added for pydsdl.SerializableType and all of
    its subclasses.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("buncho")
    type_map = read_namespace(str(root_namespace_dir), [])
    language_context = LanguageContext('js')
    namespace = build_namespace_tree(type_map, root_namespace_dir, gen_paths.out_dir, language_context)
    generator = DSDLCodeGenerator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    outfile = gen_paths.find_outfile_in_namespace("buncho.serializables", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    # Each field reports the results of the generated "is<Type>" instance tests;
    # an int32 field must test true for integer (and serializable) but not float,
    # and vice versa for a float field.
    assert json_blob["this_field_is_an_int32"]["isSerializableType"] is True
    assert json_blob["this_field_is_an_int32"]["isIntegerType"] is True
    assert json_blob["this_field_is_an_int32"]["isFloatType"] is False
    assert json_blob["this_field_is_an_int32"]["isIntegerType_field"] is True
    assert json_blob["this_field_is_a_float"]["isSerializableType"] is True
    assert json_blob["this_field_is_a_float"]["isIntegerType"] is False
    assert json_blob["this_field_is_a_float"]["isIntegerType_field"] is False
    assert json_blob["this_field_is_a_float"]["isFloatType"] is True
def test_custom_filter_and_test(gen_paths):  # type: ignore
    """
    Verifies that user-supplied additional_filters and additional_tests are made
    available to templates by the generator.
    """
    root_path = str(gen_paths.dsdl_dir / Path("uavcan"))
    output_path = gen_paths.out_dir / 'filter_and_test'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('custom_filter_and_test')
    generator = DSDLCodeGenerator(
        namespace,
        templates_dir=template_path,
        # The template applies these and writes the results into the JSON blob.
        additional_filters={'custom_filter': lambda T: 'hi mum'},
        additional_tests={'custom_test': lambda T: True})
    generator.generate_all()

    outfile = gen_paths.find_outfile_in_namespace("uavcan.time.SynchronizedTimestamp", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    assert json_blob['filter_result'] == 'hi mum'
    assert json_blob['test_result'] == 'yes'
def ptest_lang_cpp(gen_paths, implicit):  # type: ignore
    """Generates and verifies JSON with values filtered using the cpp language module.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("langtest")
    root_namespace = str(root_namespace_dir)
    compound_types = read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    # "implicit" selects templates keyed to the target language; explicit mode
    # drives template selection through the .hpp extension instead.
    if implicit:
        templates_dirs = [gen_paths.templates_dir / Path("implicit") / Path("cpp")]
    else:
        templates_dirs = [gen_paths.templates_dir / Path("explicit")]

    templates_dirs.append(gen_paths.templates_dir / Path("common"))

    language_context = LanguageContext('cpp' if implicit else None, '.hpp' if not implicit else None)
    namespace = build_namespace_tree(compound_types,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = Generator(namespace, templates_dir=templates_dirs)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("langtest.cpp.ns.TestType", namespace)

    assert (outfile is not None)

    generated_values = {}  # type: Dict
    # The generated output is itself Python; execute it to harvest the values.
    with open(str(outfile), 'r') as python_file:
        exec(python_file.read(), generated_values)

    assert len(generated_values)

    lang_cpp_output = generated_values["tests"]["lang_cpp"]

    assert lang_cpp_output["namespace"] == "langtest.cpp.ns"
    # NOTE(review): the exact line layout of these expected strings was
    # reconstructed from whitespace-mangled source — confirm against the
    # cpp filters' actual output.
    assert lang_cpp_output["namespace_open"] == r'''namespace langtest
{
namespace cpp
{
namespace ns
{
'''
    assert lang_cpp_output["namespace_open_wo_nl"] == r'''namespace langtest {
namespace cpp {
namespace ns {
'''
    assert lang_cpp_output["namespace_close"] == r'''}
}
}
'''
    assert lang_cpp_output["namespace_close_w_comments"] == r'''} // namespace ns
} // namespace cpp
} // namespace langtest
'''
    return generated_values
def gen_test_namespace(gen_paths: typing.Any, language_context: LanguageContext) -> \
        typing.Tuple[Namespace, str, typing.List[CompositeType]]:
    """
    Parse the 'scotec' test root (using 'uavcan' as a lookup directory) and
    return its namespace tree, the root path, and the parsed types.
    """
    scotec_root = str(gen_paths.dsdl_dir / Path("scotec"))
    lookup_dirs = [str(gen_paths.dsdl_dir / Path("uavcan"))]
    parsed_types = read_namespace(scotec_root, lookup_dirs, allow_unregulated_fixed_port_id=True)
    tree = build_namespace_tree(parsed_types, scotec_root, gen_paths.out_dir, language_context)
    return tree, scotec_root, parsed_types
def ptest_lang_py(gen_paths, implicit, unique_name_evaluator):  # type: ignore
    """ Generates and verifies JSON with values filtered using the python language support module.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("langtest")
    root_namespace = str(root_namespace_dir)
    # "implicit" selects templates keyed to the target language; explicit mode
    # drives template selection through the .py extension instead.
    if implicit:
        templates_dirs = [gen_paths.templates_dir / Path("implicit") / Path("py")]
    else:
        templates_dirs = [gen_paths.templates_dir / Path("explicit")]

    templates_dirs.append(gen_paths.templates_dir / Path("common"))

    compound_types = read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext('py' if implicit else None, '.py' if not implicit else None)
    namespace = build_namespace_tree(compound_types,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = DSDLCodeGenerator(namespace,
                                  generate_namespace_types=YesNoDefault.NO,
                                  templates_dir=templates_dirs)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("langtest.py.TestType", namespace)

    assert (outfile is not None)

    generated_values = {}  # type: Dict
    # The generated output is itself Python; execute it to harvest the values.
    with open(str(outfile), 'r') as python_file:
        exec(python_file.read(), generated_values)

    assert len(generated_values) > 0

    lang_py_output = generated_values["tests"]["lang_py"]
    # unique_name_evaluator checks each value matches the pattern with a
    # monotonically unique number.
    unique_name_evaluator(r'_NAME\d+_', lang_py_output["unique_name_0"])
    unique_name_evaluator(r'_NAME\d+_', lang_py_output["unique_name_1"])
    unique_name_evaluator(r'_name\d+_', lang_py_output["unique_name_2"])
    assert "identifier_zero" == lang_py_output["id_0"]
    many_unique_names = lang_py_output.get("many_unique_names")
    if many_unique_names is not None:
        for name in many_unique_names:
            unique_name_evaluator(r'_f\d+_', name)
    return generated_values
def test_realgen(gen_paths, lang_key):  # type: ignore
    """
    Sanity test that runs through the entire public, regulated set of UAVCAN
    types and generates some basic C code.

    :param lang_key: Target language key (parametrized fixture).
    """
    root_namespace_dir = gen_paths.root_dir / Path("submodules") / \
        Path("public_regulated_data_types") / Path("uavcan")
    # FIX: lookup directories were passed as '' (empty string). That only worked
    # by accident (iterating '' yields nothing); an empty list is the correct,
    # consistent argument used by every other call in this file.
    type_map = read_namespace(str(root_namespace_dir), [])
    language_context = LanguageContext(lang_key)
    namespace = build_namespace_tree(type_map,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = Generator(namespace, False, language_context)
    generator.generate_all(False)
def test_TestType_0_1(gen_paths):  # type: ignore
    """
    Generates a JSON blob and then reads it back in.

    This test uses an extremely simple DSDL type to generate JSON then reads
    this JSON back in and parses it using Python's built-in parser.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("uavcan")
    root_namespace = str(root_namespace_dir)
    language_context = LanguageContext(extension='.json')
    # FIX: lookup directories were passed as '' (empty string). That only worked
    # by accident (iterating '' yields nothing); an empty list is the correct,
    # consistent argument used by every other call in this file.
    namespace = build_namespace_tree(read_namespace(root_namespace, []),
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = Generator(namespace, False, language_context, gen_paths.templates_dir)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("uavcan.test.TestType", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    assert len(json_blob) == 1

    # Top-level element is the enclosing namespace.
    uavcan_namespace = json_blob[0]
    assert uavcan_namespace["type"] == "namespace"
    assert uavcan_namespace["name"] == "uavcan.test"
    assert len(uavcan_namespace["contents"]) == 1

    test_type = uavcan_namespace["contents"][0]
    assert test_type["name"] == "TestType"
    assert test_type["version"]["major"] == 0
    assert test_type["version"]["minor"] == 1
    assert len(test_type["attributes"]) == 2

    test_attr_0 = test_type["attributes"][0]
    assert test_attr_0["name"] == "data"
    assert test_attr_0["type"] == "uint56"
    assert test_attr_0["bit_length"] == 56
    assert test_attr_0["cast_mode"] == "TRUNCATED"

    test_attr_1 = test_type["attributes"][1]
    assert test_attr_1["name"] == "const_bool_example"
    assert test_attr_1["type"] == "uint1"
    assert test_attr_1["bit_length"] == 1
    assert test_attr_1["cast_mode"] == "SATURATED"
def _test_common_namespace(gen_paths,  # type: ignore
                           target_language: str = 'js',
                           extension: str = '.json',
                           additional_config: typing.Optional[typing.Mapping[str, str]] = None):
    """
    Build a namespace tree over the test 'uavcan' root, optionally applying
    extra per-language configuration values to the language context.
    """
    root_namespace_dir = gen_paths.dsdl_dir / pathlib.Path("uavcan")
    root_namespace = str(root_namespace_dir)
    lctx = LanguageContext(target_language, extension=extension)
    if additional_config is not None:
        # Config values are set under the language's package section,
        # e.g. "nunavut.lang.js".
        ln_package_name = 'nunavut.lang.{}'.format(target_language)
        for name, value in additional_config.items():
            lctx.config.set(ln_package_name, name, value)
    return nunavut.build_namespace_tree(pydsdl.read_namespace(root_namespace, []),
                                        root_namespace_dir,
                                        gen_paths.out_dir,
                                        lctx)
def test_default_array_type_cpp(gen_paths):  # type: ignore
    """
    Verify that the default array type for C++ is as expected.
    """
    root_namespace = str(gen_paths.dsdl_dir / Path("radar"))
    compound_types = pydsdl.read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext('cpp')
    namespace = build_namespace_tree(compound_types,
                                     root_namespace,
                                     gen_paths.out_dir,
                                     language_context)
    generator = DSDLCodeGenerator(namespace)
    generator.generate_all(False)

    # Without overrides, fixed-size arrays must come out as std::array.
    assert_pattern_match_in_file(
        gen_paths.find_outfile_in_namespace("radar.Phased", namespace),
        re.compile(r'\s*std::array<float,3>\s+bank_normal_rads;\s*'))
def setup(self) -> None: """ Required to prepare this object to run (run method will raise exceptions if called before this method). While this may seem a bit clunky it helps isolate errors to two distinct stages; setup and run. Setup never generates anything. It only parses the inputs and creates the generator arguments. """ # # nunavut : parse inputs # language_context = self._create_language_context() if self._args.generate_support != "only": type_map = pydsdl.read_namespace( self._args.root_namespace, self._extra_includes, allow_unregulated_fixed_port_id=self._args. allow_unregulated_fixed_port_id, ) else: type_map = [] self._root_namespace = nunavut.build_namespace_tree( type_map, self._args.root_namespace, self._args.outdir, language_context) # # nunavut : create generators # generator_args = { "generate_namespace_types": (YesNoDefault.YES if self._args.generate_namespace_types else YesNoDefault.DEFAULT), "templates_dir": (pathlib.Path(self._args.templates) if self._args.templates is not None else None), "trim_blocks": self._args.trim_blocks, "lstrip_blocks": self._args.lstrip_blocks, "post_processors": self._build_post_processor_list_from_args(), } self._generators = create_generators(self._root_namespace, **generator_args)
def test_issue_136(gen_paths, lang_key: str, include_format: str):  # type: ignore
    """
    Generates a type that has two different versions using the built-in language support and
    verifies that the header include guards include the type version. This verifies fix #136
    has not regressed.
    """
    covid_versions = [pydsdl.Version(1, 9), pydsdl.Version(1, 10)]
    root_namespace = str(gen_paths.dsdl_dir / Path("viruses"))
    compound_types = pydsdl.read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext(lang_key, omit_serialization_support_for_target=True)
    namespace = build_namespace_tree(compound_types,
                                     root_namespace,
                                     gen_paths.out_dir,
                                     language_context)
    generator = DSDLCodeGenerator(namespace)
    generator.generate_all(False)

    for covid_version in covid_versions:
        # include_format embeds {major}/{minor} placeholders for the guard macro.
        include_guard_start = re.compile(r'#ifndef {}\b'.format(
            include_format.format(major=covid_version.major, minor=covid_version.minor)))
        include_guard_def = re.compile(r'#define {}\b'.format(
            include_format.format(major=covid_version.major, minor=covid_version.minor)))

        # Now read back in and verify
        outfile = gen_paths.find_outfile_in_namespace("viruses.covid", namespace,
                                                      type_version=covid_version)

        assert (outfile is not None)

        found_open_line = 0
        found_def_line = 0
        # Scan for the guard's #ifndef and the subsequent #define.
        with open(str(outfile), 'r') as header_file:
            line_no = 1
            for line in header_file:
                if include_guard_start.match(line):
                    found_open_line = line_no
                if include_guard_def.match(line):
                    found_def_line = line_no
                    break
                line_no += 1

        assert (found_open_line > 0)
        # The #define must follow its #ifndef.
        assert (found_def_line > found_open_line)
def test_template_assert(gen_paths):  # type: ignore
    """
    Tests our template assertion extension: a template that trips the assertion
    must raise TemplateAssertionError with the offending template and message.
    """
    root_path = str(gen_paths.dsdl_dir / Path("uavcan"))
    output_path = gen_paths.out_dir / 'assert'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('assert')
    generator = DSDLCodeGenerator(namespace, templates_dir=template_path)
    try:
        generator.generate_all()
        assert False  # generation must not succeed
    except TemplateAssertionError as e:
        # BUG FIX: these were bare comparison expressions (no-ops) — the test
        # could never fail here. They now actually assert. The original also
        # compared e.filename to 2 where the line number was clearly intended.
        assert e.filename == str(template_path / "Any.j2")
        assert e.lineno == 2  # NOTE(review): original read "e.filename == 2" — confirm attribute name
        assert e.message == 'Template assertion failed.'
def test_type_to_include(gen_paths):  # type: ignore
    """Test the type_to_include filter."""
    root_path = (gen_paths.dsdl_dir / Path("uavcan")).as_posix()
    output_path = gen_paths.out_dir / 'type_to_include'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('type_to_include')
    generator = DSDLCodeGenerator(namespace, templates_dir=template_path)
    generator.generate_all()

    outfile = gen_paths.find_outfile_in_namespace("uavcan.time.SynchronizedTimestamp", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    # The filter must emit a versioned, slash-separated include path.
    assert json_blob['include'] == "uavcan/time/SynchronizedTimestamp_1_0.json"
def test_anygen(gen_paths, lang_key):  # type: ignore
    """
    Verifies that any dsdl type will resolve to an ``Any`` template.
    """
    # NOTE(review): lang_key is accepted (parametrized fixture?) but unused in
    # this body — confirm whether it should feed LanguageContext.
    root_namespace_dir = gen_paths.dsdl_dir / Path("uavcan")
    type_map = read_namespace(str(root_namespace_dir), [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(type_map,
                                     root_namespace_dir,
                                     str(gen_paths.out_dir),
                                     language_context)
    generator = Generator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    outfile = gen_paths.find_outfile_in_namespace("uavcan.time.SynchronizedTimestamp", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob is not None
    assert json_blob["full_name"] == "uavcan.time.SynchronizedTimestamp"
def test_one_template(gen_paths):  # type: ignore
    """
    Verifies that we can use only a SeralizableType.j2 as the only template when
    no service types are present.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("uavcan")
    root_namespace = str(root_namespace_dir)
    serializable_types = read_namespace(root_namespace, [])
    language_context = LanguageContext(extension='.json')
    namespace = build_namespace_tree(serializable_types,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = Generator(namespace, templates_dir=gen_paths.templates_dir)
    generator.generate_all(False)

    outfile = gen_paths.find_outfile_in_namespace("uavcan.time.TimeSystem", namespace)

    assert (outfile is not None)

    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert json_blob['uavcan.time.TimeSystem']['namespace'] == 'uavcan.time'
    assert json_blob['uavcan.time.TimeSystem']['is_serializable']
def test_filter_to_template_unique(gen_paths):
    """ Cover issue #88 """
    root_path = str(gen_paths.dsdl_dir / Path("one"))
    output_path = gen_paths.out_dir / 'to_unique'
    compound_types = read_namespace(root_path, [])
    language_context = LanguageContext(target_language='c')
    namespace = build_namespace_tree(compound_types, root_path, output_path, language_context)
    template_path = gen_paths.templates_dir / Path('to_unique')
    generator = DSDLCodeGenerator(namespace, templates_dir=template_path)
    generator.generate_all()

    outfile = gen_paths.find_outfile_in_namespace("one.foo", namespace)

    assert (outfile is not None)

    # The unique-name filter must keep counting across template sections
    # (three groups of four names) rather than restarting at _f0_.
    expected = '_f0_\n_f1_\n_f2_\n_f3_\n\n_f4_\n_f5_\n_f6_\n_f7_\n\n_f8_\n_f9_\n_f10_\n_f11_\n'

    with open(str(outfile), 'r') as foo_file:
        actual = foo_file.read()

    assert expected == actual
def test_var_array_override_cpp(gen_paths):  # type: ignore
    """
    Make sure we can override the type generated for variable-length arrays.
    """
    # {TYPE} and {MAX_SIZE} are substituted by the language support when
    # rendering variable-length array fields.
    language_option_overrides = {
        'variable_array_type': 'scotec::TerribleArray<{TYPE},{MAX_SIZE}>'
    }
    root_namespace = str(gen_paths.dsdl_dir / Path("radar"))
    compound_types = pydsdl.read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext('cpp', language_options=language_option_overrides)
    namespace = build_namespace_tree(compound_types,
                                     root_namespace,
                                     gen_paths.out_dir,
                                     language_context)
    generator = DSDLCodeGenerator(namespace)
    generator.generate_all(False)

    # The override must apply to the variable-length array while fixed-size
    # arrays still use std::array.
    assert_pattern_match_in_file(
        gen_paths.find_outfile_in_namespace("radar.Phased", namespace),
        re.compile(r'\s*scotec::TerribleArray<float,2677>\s+antennae_per_bank;\s*'),
        re.compile(r'\s*std::array<float,3>\s+bank_normal_rads;\s*'))
def _run(args: argparse.Namespace, extra_includes: typing.List[str]) -> int:  # noqa: C901
    '''
    Post command-line setup and parsing logic to execute nunavut library routines based on input.

    :param args: Fully parsed command-line namespace.
    :param extra_includes: Additional DSDL lookup directories.
    :return: 0 on success (also after --list-outputs / --list-inputs short-circuits).
    '''

    #
    # nunavut : load module
    #
    import pydsdl
    import nunavut
    import nunavut.jinja
    import nunavut.lang

    def _build_ext_program_postprocessor(program: str, args: argparse.Namespace) \
            -> nunavut.postprocessors.FilePostProcessor:
        # Wrap an external program (plus any --pp-run-program-arg values) as an
        # edit-in-place post-processor.
        subprocess_args = [program]
        if hasattr(args, 'pp_run_program_arg') and args.pp_run_program_arg is not None:
            for program_arg in args.pp_run_program_arg:
                subprocess_args.append(program_arg)
        return nunavut.postprocessors.ExternalProgramEditInPlace(subprocess_args)

    def _build_post_processor_list_from_args(args: argparse.Namespace) \
            -> typing.List[nunavut.postprocessors.PostProcessor]:
        '''
        Return a list of post processors setup based on the provided
        commandline arguments. This list may be empty but the function
        will not return None.
        '''
        post_processors = []  # type: typing.List[nunavut.postprocessors.PostProcessor]
        if args.pp_trim_trailing_whitespace:
            post_processors.append(nunavut.postprocessors.TrimTrailingWhitespace())
        if hasattr(args, 'pp_max_emptylines') and args.pp_max_emptylines is not None:
            post_processors.append(nunavut.postprocessors.LimitEmptyLines(args.pp_max_emptylines))
        if hasattr(args, 'pp_run_program') and args.pp_run_program is not None:
            post_processors.append(_build_ext_program_postprocessor(args.pp_run_program, args))
        # Always applied last so earlier processors cannot clobber the mode.
        post_processors.append(nunavut.postprocessors.SetFileMode(args.file_mode))
        return post_processors

    #
    # nunavut: language context.
    #
    language_options = dict()
    if args.target_endianness is not None:
        language_options['target_endianness'] = args.target_endianness
    language_options['omit_float_serialization_support'] = args.omit_float_serialization_support
    language_options['enable_serialization_asserts'] = args.enable_serialization_asserts
    language_options['enable_override_variable_array_capacity'] = \
        args.enable_override_variable_array_capacity

    language_context = nunavut.lang.LanguageContext(
        args.target_language,
        args.output_extension,
        args.namespace_output_stem,
        omit_serialization_support_for_target=args.omit_serialization_support,
        language_options=language_options,
        include_experimental_languages=args.experimental_languages)

    #
    # nunavut: inferred target language from extension
    #
    if args.output_extension is not None and language_context.get_target_language() is None:
        # No explicit target language: try to match the requested output
        # extension against each supported language's configured extension.
        inferred_target_language_name = None  # type: typing.Optional[str]
        for name, lang in language_context.get_supported_languages().items():
            extension = lang.get_config_value('extension', None)
            if extension is not None and extension == args.output_extension:
                inferred_target_language_name = name
                break
        if inferred_target_language_name is not None:
            logging.info('Inferring target language %s based on extension "%s".',
                         inferred_target_language_name, args.output_extension)
            language_context = nunavut.lang.LanguageContext(
                inferred_target_language_name,
                args.output_extension,
                args.namespace_output_stem,
                omit_serialization_support_for_target=args.omit_serialization_support,
                language_options=language_options)
        elif args.templates is None:
            # FIX: logging.warn is a deprecated alias for logging.warning.
            logging.warning(
                textwrap.dedent('''
                ***********************************************************************
                    No target language was given, none could be inferred from the
                    output extension (-e) argument "%s", and no user templates were
                    specified. You will fail to find templates if you have provided
                    any DSDL types to generate.
                ***********************************************************************
                ''').lstrip(), args.output_extension)

    #
    # nunavut : parse
    #
    if args.generate_support != 'only':
        type_map = pydsdl.read_namespace(
            args.root_namespace,
            extra_includes,
            allow_unregulated_fixed_port_id=args.allow_unregulated_fixed_port_id)
    else:
        # Support-only generation needs no parsed DSDL types.
        type_map = []

    root_namespace = nunavut.build_namespace_tree(type_map,
                                                  args.root_namespace,
                                                  args.outdir,
                                                  language_context)

    #
    # nunavut : generate
    #
    generator_args = {
        'generate_namespace_types': (nunavut.YesNoDefault.YES
                                     if args.generate_namespace_types
                                     else nunavut.YesNoDefault.DEFAULT),
        'templates_dir': (pathlib.Path(args.templates)
                          if args.templates is not None
                          else None),
        'trim_blocks': args.trim_blocks,
        'lstrip_blocks': args.lstrip_blocks,
        'post_processors': _build_post_processor_list_from_args(args)
    }

    from nunavut.generators import create_generators
    generator, support_generator = create_generators(root_namespace, **generator_args)

    # --list-outputs: print the ';'-separated files that would be generated and exit.
    if args.list_outputs:
        if args.generate_support != 'only':
            for output_path in generator.generate_all(is_dryrun=True):
                sys.stdout.write(str(output_path))
                sys.stdout.write(';')
        if _should_generate_support(args):
            for output_path in support_generator.generate_all(is_dryrun=True):
                sys.stdout.write(str(output_path))
                sys.stdout.write(';')
        return 0

    # --list-inputs: print templates and DSDL source files that feed generation, then exit.
    if args.list_inputs:
        if args.generate_support != 'only':
            for input_path in generator.get_templates():
                sys.stdout.write(str(input_path.resolve()))
                sys.stdout.write(';')
        if _should_generate_support(args):
            for input_path in support_generator.get_templates():
                sys.stdout.write(str(input_path.resolve()))
                sys.stdout.write(';')
        if args.generate_support != 'only':
            if generator.generate_namespace_types:
                for output_type, _ in root_namespace.get_all_types():
                    sys.stdout.write(str(output_type.source_file_path))
                    sys.stdout.write(';')
            else:
                for output_type, _ in root_namespace.get_all_datatypes():
                    sys.stdout.write(str(output_type.source_file_path))
                    sys.stdout.write(';')
        return 0

    # Normal generation path.
    if _should_generate_support(args):
        support_generator.generate_all(is_dryrun=args.dry_run,
                                       allow_overwrite=not args.no_overwrite)

    if args.generate_support != 'only':
        generator.generate_all(is_dryrun=args.dry_run,
                               allow_overwrite=not args.no_overwrite)

    return 0
def generate_dsdl(
    self,
    build_uuid: str,
    urls: List[str],
    target_lang: str,
    target_endian: str,
    flags: List[str],
    doc_url: str,
):
    """
    Generate (transpile) the DSDL code.

    Pulls uploaded DSDL zip archives for ``build_uuid`` from object storage,
    fetches any remote namespaces from ``urls``, runs nunavut over every
    discovered namespace directory, then either publishes HTML docs under
    ``doc_url`` or uploads a zip of the generated sources.
    Progress is reported via self.update_state (task-style API).
    """
    # Create working directory
    arch_dir = Path(tempfile.mkdtemp(prefix="pyuavcan-cli-dsdl"))

    # Get uploaded files from minio and unzip
    objects = storage.list_objects(f"{build_uuid}", prefix="uploads/", recursive=True)
    for obj in objects:
        # Create temp file for zip archive
        _, file_path = tempfile.mkstemp(".zip", "dsdl")
        # Save and unzip
        # NOTE(review): object_name is passed as bytes (.encode) — confirm the
        # storage client expects bytes here rather than str.
        data = storage.get_object(obj.bucket_name, obj.object_name.encode("utf-8"))
        with open(file_path, "wb") as file_data:
            for d in data.stream(32 * 1024):
                file_data.write(d)
        unzip_to_directory(file_path, arch_dir)
        # Delete zip file
        os.unlink(file_path)

    # pylint: disable=invalid-name
    for c, url in enumerate(urls):
        self.update_state(
            state="PROGRESS",
            meta={
                "current": c + 1,
                "total": len(urls),
                "status": f"Fetching remote namespace {url}",
            },
        )
        fetch_remote_namespace(url, arch_dir)

    # Gather all the namespace directories. If a top-level directory contains
    # sub-namespaces, those are used instead of the parent.
    inner = [d for d in Path(arch_dir).iterdir() if d.is_dir()]
    namespaces = []
    for path in inner:
        subnss = [d for d in path.iterdir() if d.is_dir() and not d.name.startswith(".")]
        if len(subnss) > 0:
            namespaces.extend(
                [d for d in path.iterdir() if d.is_dir() and not d.name.startswith(".")]
            )
        else:
            namespaces.append(path)

    out_dir = Path(tempfile.mkdtemp(prefix="nunavut-out"))

    # Generate nnvg command (display-only: shown to the user via task meta,
    # not executed — generation below goes through the library API).
    # pylint: disable=invalid-name
    command = ""
    for c, ns_dir in enumerate(namespaces):
        if c > 0:
            command += "\n"
        command += "nnvg "
        command += f"--target-language {target_lang} "
        if target_endian != "any":
            command += f"--target-endianness {target_endian} "
        command += f"{' '.join(flags)}"
        command += f" dsdl_src{str(ns_dir).replace(str(arch_dir), '')}"
        # Every other namespace acts as a lookup directory for this one.
        for lookup_dir in namespaces:
            if lookup_dir != ns_dir:
                command += (
                    f" --lookup dsdl_src{str(lookup_dir).replace(str(arch_dir), '')}"
                )

    self.update_state(
        state="PROGRESS",
        meta={
            "current": 0,
            "total": len(namespaces),
            "status": "Preparing to generate namespaces",
            "command": command,
        },
    )

    # Parse DSDL
    # pylint: disable=invalid-name
    for c, namespace in enumerate(namespaces):
        namespace = str(namespace)
        self.update_state(
            state="PROGRESS",
            meta={
                "current": c + 1,
                "total": len(namespaces),
                "status": "Generating namespace: " + namespace.split("/")[-1],
                "command": command,
            },
        )
        extra_includes = namespaces
        extra_includes = list(map(str, extra_includes))
        try:
            compound_types = read_namespace(
                namespace, extra_includes, allow_unregulated_fixed_port_id=False
            )
        except InvalidDefinitionError as error:
            # Strip the temp-dir prefix so the user sees repo-relative paths.
            text = str(error).replace(str(arch_dir), "")
            raise RuntimeError(f"{text}") from error

        # Select target language and configure context
        language_options = {}
        language_options["target_endianness"] = target_endian
        language_options["omit_float_serialization_support"] = (
            "--omit-float-serialization-support" in flags
        )
        language_options["enable_serialization_asserts"] = (
            "--enable-serialization-asserts" in flags
        )
        lang_context = LanguageContext(
            target_lang,
            omit_serialization_support_for_target="--omit-serialization-support" in flags,
            language_options=language_options,
        )
        # Build namespace tree
        root_namespace = build_namespace_tree(
            compound_types, namespace, out_dir, lang_context
        )
        # Generate code
        generator, support_generator = create_generators(root_namespace)
        generator.generate_all()
        support_generator.generate_all()

    if target_lang == "html":
        # Upload generated files
        for file in out_dir.glob("**/*"):
            if file.is_file():
                rel_path = doc_url / file.relative_to(out_dir)
                # Refuse to overwrite an existing doc URL; a missing object
                # raises S3Error, which means the URL is free.
                try:
                    if storage.stat_object("docs", f"{rel_path}"):
                        raise RuntimeError("Specified doc URL is already taken")
                except minio.error.S3Error:
                    pass
                storage.fput_object(
                    "docs", str(rel_path), str(file.absolute()), content_type="text/html"
                )
        return {
            "current": len(namespaces),
            "total": len(namespaces),
            "command": command,
            "type": "htmldoc",
            "status": "Complete!",
            "result": [
                f"{settings.MINIO_DOCS}/{doc_url}/{str(ns).split('/')[-1]}/index.html"
                for ns in namespaces
            ],
        }
    else:
        # Zip result
        zipfile_name = f"nunavut_out-{uuid.uuid4()}.zip"
        zipf = zipfile.ZipFile(f"/tmp/{zipfile_name}", "w", zipfile.ZIP_DEFLATED)
        zipdir(out_dir, zipf)
        zipf.close()
        # Upload result
        storage.fput_object(
            "results",
            zipfile_name,
            f"/tmp/{zipfile_name}",
        )
        return {
            "current": len(namespaces),
            "total": len(namespaces),
            "command": command,
            "type": "generic",
            "status": "Complete!",
            "result": f"{settings.MINIO_RESULTS}/{zipfile_name}",
        }
def ptest_lang_c(gen_paths: Any,
                 implicit: bool,
                 unique_name_evaluator: Any,
                 use_standard_types: bool,
                 configurable_language_context_factory: Callable) -> Dict:
    """
    Generates and verifies JSON with values filtered using the c language support module.

    :param gen_paths: Fixture providing dsdl/template/output directories and helpers.
    :param implicit: If True, use the implicit "c" template directory and let the language
                     be inferred; otherwise use the explicit templates with a ".h" extension.
    :param unique_name_evaluator: Callable fixture that asserts a generated name matches a regex.
    :param use_standard_types: Passed through as the nunavut.lang.c ``use_standard_types`` option;
                               selects stdint-style names versus plain C type names.
    :param configurable_language_context_factory: Fixture building a LanguageContext with overrides.
    :return: The dictionary of values exec'd from the generated output file.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("langtest")

    # Template search path: implicit-vs-explicit primary directory, plus the shared
    # "common" templates in both cases.
    if implicit:
        templates_dirs = [gen_paths.templates_dir / Path("implicit") / Path("c")]
    else:
        templates_dirs = [gen_paths.templates_dir / Path("explicit")]
    templates_dirs.append(gen_paths.templates_dir / Path("common"))

    root_namespace = str(root_namespace_dir)
    compound_types = read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)

    # Override the c language option under test before building the context.
    config_overrides = {'nunavut.lang.c': {'use_standard_types': use_standard_types}}
    language_context = configurable_language_context_factory(config_overrides,
                                                            'c' if implicit else None,
                                                            '.h' if not implicit else None)
    namespace = build_namespace_tree(compound_types,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = DSDLCodeGenerator(namespace, templates_dir=templates_dirs)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("langtest.c.TestType", namespace)

    assert (outfile is not None)

    generated_values = {}  # type: Dict
    # The generated output is itself Python source; exec it to recover the values
    # the templates produced.
    with open(str(outfile), 'r') as python_file:
        exec(python_file.read(), generated_values)

    assert len(generated_values) > 0

    lang_c_output = generated_values["tests"]["lang_c"]
    assert lang_c_output["namespace"] == "langtest.c"
    assert lang_c_output["namespace_macrofy"] == "LANGTEST_C"

    # 8/9-bit integers: stdint names when use_standard_types, builtin C names otherwise.
    if use_standard_types:
        assert lang_c_output["ctype truncated uint8"] == "uint8_t"
        assert lang_c_output["ctype saturated int8"] == "int8_t"
        assert lang_c_output["ctype truncated uint9"] == "uint16_t"
        assert lang_c_output["ctype saturated int9"] == "int16_t"
    else:
        assert lang_c_output["ctype truncated uint8"] == "unsigned char"
        assert lang_c_output["ctype saturated int8"] == "char"
        assert lang_c_output["ctype truncated uint9"] == "unsigned int"
        assert lang_c_output["ctype saturated int9"] == "int"

    # 32/64-bit integers follow the same selection.
    if use_standard_types:
        assert lang_c_output["ctype truncated uint32"] == "uint32_t"
        assert lang_c_output["ctype saturated int32"] == "int32_t"
        assert lang_c_output["ctype truncated uint64"] == "uint64_t"
        assert lang_c_output["ctype saturated int64"] == "int64_t"
    else:
        assert lang_c_output["ctype truncated uint32"] == "unsigned long"
        assert lang_c_output["ctype saturated int32"] == "long"
        assert lang_c_output["ctype truncated uint64"] == "unsigned long long"
        assert lang_c_output["ctype saturated int64"] == "long long"

    assert lang_c_output["ctype saturated bool"] == "bool"

    # Uniquified names must match the expected prefix patterns.
    unique_name_evaluator(r'_nAME\d+_', lang_c_output["unique_name_0"])
    unique_name_evaluator(r'_nAME\d+_', lang_c_output["unique_name_1"])
    unique_name_evaluator(r'_naME\d+_', lang_c_output["unique_name_2"])
    unique_name_evaluator(r'_\d+_', lang_c_output["unique_name_3"])

    return generated_values
def ptest_lang_c(gen_paths, implicit, unique_name_evaluator):  # type: ignore
    """
    Generates and verifies JSON with values filtered using the c language support module.

    :param gen_paths: Fixture providing dsdl/template/output directories and helpers.
    :param implicit: If True, use the implicit "c" template directory and an inferred
                     language; otherwise use the explicit templates with a ".h" extension.
    :param unique_name_evaluator: Callable fixture that asserts a generated name matches a regex.
    :return: The dictionary of values exec'd from the generated output file.
    """
    root_namespace_dir = gen_paths.dsdl_dir / Path("langtest")

    # Template search path: implicit-vs-explicit primary directory, plus the shared
    # "common" templates in both cases.
    if implicit:
        templates_dirs = [gen_paths.templates_dir / Path("implicit") / Path("c")]
    else:
        templates_dirs = [gen_paths.templates_dir / Path("explicit")]
    templates_dirs.append(gen_paths.templates_dir / Path("common"))

    root_namespace = str(root_namespace_dir)
    # Fix: lookup directories must be a list of paths, not a string. The previous
    # value '' only worked by accident (iterating an empty string yields nothing);
    # every sibling call in this file passes a list.
    compound_types = read_namespace(root_namespace, [], allow_unregulated_fixed_port_id=True)
    language_context = LanguageContext('c' if implicit else None,
                                       '.h' if not implicit else None)
    namespace = build_namespace_tree(compound_types,
                                     root_namespace_dir,
                                     gen_paths.out_dir,
                                     language_context)
    generator = Generator(namespace, False, language_context, templates_dirs)
    generator.generate_all(False)

    # Now read back in and verify
    outfile = gen_paths.find_outfile_in_namespace("langtest.c.TestType", namespace)

    assert (outfile is not None)

    generated_values = {}  # type: Dict
    # The generated output is itself Python source; exec it to recover the values
    # the templates produced.
    with open(str(outfile), 'r') as python_file:
        exec(python_file.read(), generated_values)

    assert len(generated_values) > 0

    lang_c_output = generated_values["tests"]["lang_c"]
    assert lang_c_output["namespace"] == "langtest.c"
    assert lang_c_output["namespace_macrofy"] == "LANGTEST_C"

    # ctype_std uses stdint names; plain ctype uses builtin C type names.
    assert lang_c_output["ctype_std truncated uint8"] == "uint8_t"
    assert lang_c_output["ctype_std saturated int8"] == "int8_t"
    assert lang_c_output["ctype_std truncated uint9"] == "uint16_t"
    assert lang_c_output["ctype_std saturated int9"] == "int16_t"
    assert lang_c_output["ctype truncated uint8"] == "unsigned char"
    assert lang_c_output["ctype saturated int8"] == "char"
    assert lang_c_output["ctype truncated uint9"] == "unsigned int"
    assert lang_c_output["ctype saturated int9"] == "int"
    assert lang_c_output["ctype_std truncated uint32"] == "uint32_t"
    assert lang_c_output["ctype_std saturated int32"] == "int32_t"
    assert lang_c_output["ctype_std truncated uint64"] == "uint64_t"
    assert lang_c_output["ctype_std saturated int64"] == "int64_t"
    assert lang_c_output["ctype truncated uint32"] == "unsigned long"
    assert lang_c_output["ctype saturated int32"] == "long"
    assert lang_c_output["ctype truncated uint64"] == "unsigned long long"
    assert lang_c_output["ctype saturated int64"] == "long long"
    assert lang_c_output["ctype saturated bool"] == "BOOL"
    assert lang_c_output["ctype_std saturated bool"] == "bool"

    # Uniquified names must match the expected prefix patterns.
    unique_name_evaluator(r'_nAME\d+_', lang_c_output["unique_name_0"])
    unique_name_evaluator(r'_nAME\d+_', lang_c_output["unique_name_1"])
    unique_name_evaluator(r'_naME\d+_', lang_c_output["unique_name_2"])
    unique_name_evaluator(r'_\d+_', lang_c_output["unique_name_3"])

    return generated_values
def generate_package(root_namespace_directory: _AnyPath, lookup_directories: typing.Iterable[_AnyPath] = (), output_directory: typing.Optional[_AnyPath] = None, allow_unregulated_fixed_port_id: bool = False) -> GeneratedPackageInfo: """ This function runs the DSDL compiler, converting a specified DSDL root namespace into a Python package. In the generated package, nested DSDL namespaces are represented as Python subpackages, DSDL types as Python classes, type version numbers as class name suffixes separated via underscores (like ``Type_1_0``), constants as class attributes, fields as properties. For a more detailed information on how to use generated types, just generate them and read the resulting code -- it is made to be human-readable and contains docstrings. Generated packages can be freely moved around the file system or even deployed on other systems -- they are fully location-invariant. Generated packages do not automatically import their nested subpackages. For example, if the application needs to use ``uavcan.node.Heartbeat.1.0``, it has to ``import uavcan.node`` explicitly; doing just ``import uavcan`` is not sufficient. If the source definition contains identifiers, type names, namespace components, or other entities whose names are listed in ``nunavut.lang.py.PYTHON_RESERVED_IDENTIFIERS``, the compiler applies stropping by suffixing such entities with an underscore ``_``. A small subset of applications may require access to a generated entity without knowing in advance whether its name is a reserved identifier or not (i.e., whether it's stropped or not). To simplify usage, this submodule provides helper functions :func:`pyuavcan.dsdl.get_attribute` and :func:`pyuavcan.dsdl.set_attribute` that provide access to generated class/object attributes using their original names before stropping. 
Likewise, the function :func:`pyuavcan.dsdl.get_model` can find a generated type even if any of its name components are stropped; e.g., a DSDL type ``str.Type.1.0`` would be imported as ``str_.Type_1_0``. The above, however, is irrelevant for an application that does not require genericity (vast majority of applications don't), so a much easier approach in that case is just to look at the generated code and see if there are any stropped identifiers in it, and then just use appropriate names statically. The recommended usage pattern for this function is lazy generation. First, add the ``output_directory`` (if not specified it defaults to the current working directory) to :data:`sys.path` or to the ``PYTHONPATH`` environment variable to make the generated package(s) importable. Then try importing the target DSDL-generated package. If the attempt is successful, our job here is done. Otherwise, the package(s) need(s) to be generated by invoking this function, and then another import attempt will have to be made. Beware that before retrying the import it's necessary to invoke :func:`importlib.invalidate_caches`. A package generated for a particular version of PyUAVCAN may be incompatible with any other version of the library. If your application relies on lazy generation, consider including the library version string :data:`pyuavcan.__version__` in ``output_directory``, so that the generated package cache is invalidated automatically when a different version of the library is used. Having generated a package, consider updating the include path set of your Python IDE to take advantage of code completion and static type checking. When using PyUAVCAN from an interactive session (e.g., REPL or Jupyter), it is usually more convenient to generate packages using the command-line tool rather than invoking this function manually. Please refer to the command-line tool documentation for details. :param root_namespace_directory: The source DSDL root namespace directory path. 
The last component of the path is the name of the root namespace. For example, to generate package for the root namespace ``uavcan``, the path would be like ``foo/bar/uavcan``. :param lookup_directories: An iterable of DSDL root namespace directory paths where to search for referred DSDL definitions. The format of each path is the same as for the previous parameter; i.e., the last component of each path is a DSDL root namespace name. If you are generating code for a vendor-specific DSDL root namespace, make sure to provide at least the path to the standard ``uavcan`` namespace directory here. :param output_directory: The generated Python package directory will be placed into this directory. If not specified or None, the current working directory is used. For example, if this argument equals ``foo/bar``, and the DSDL root namespace name is ``uavcan``, the top-level ``__init__.py`` of the generated package will end up in ``foo/bar/uavcan/__init__.py``. The directory tree will be created automatically if it does not exist (like ``mkdir -p``). If the destination exists, it will be silently written over. In production, applications are recommended to shard the output directory by the library version number to avoid compatibility issues with code generated by older versions of the library. Don't forget to add the output directory to ``PYTHONPATH``, even if it's the current working directory. :param allow_unregulated_fixed_port_id: If True, the DSDL processing front-end will not reject unregulated data types with fixed port-ID. If you are not sure what it means, do not use it, and read the UAVCAN specification first. The default is False. :return: An instance of :class:`GeneratedPackageInfo` describing the generated package. 
:raises: :class:`OSError` if required operations on the file system could not be performed; ``pydsdl.InvalidDefinitionError`` if the source DSDL definitions are invalid; ``pydsdl.InternalError`` if there is a bug in the DSDL processing front-end; :class:`ValueError` if any of the arguments are otherwise invalid. The following table is an excerpt from the UAVCAN specification. Observe that *unregulated fixed port identifiers* are prohibited by default, but it can be overridden. +-------+---------------------------------------------------+----------------------------------------------+ |Scope | Regulated | Unregulated | +=======+===================================================+==============================================+ |Public |Standard and contributed (e.g., vendor-specific) |Definitions distributed separately from the | | |definitions. Fixed port identifiers are allowed; |UAVCAN specification. Fixed port identifiers | | |they are called *"regulated port-IDs"*. |are *not allowed*. | +-------+---------------------------------------------------+----------------------------------------------+ |Private|Nonexistent category. |Definitions that are not available to anyone | | | |except their authors. Fixed port identifiers | | | |are permitted (although not recommended); they| | | |are called *"unregulated fixed port-IDs"*. | +-------+---------------------------------------------------+----------------------------------------------+ Here is a brief usage example: >>> import sys >>> import pathlib >>> import tempfile >>> import importlib >>> import pyuavcan >>> dsdl_generated_dir = pathlib.Path(tempfile.gettempdir(), 'dsdl-for-my-program', pyuavcan.__version__) >>> dsdl_generated_dir.mkdir(parents=True, exist_ok=True) >>> sys.path.insert(0, str(dsdl_generated_dir)) >>> try: ... import sirius_cyber_corp ... import uavcan.si.sample.volumetric_flow_rate ... except (ImportError, AttributeError): ... 
_ = pyuavcan.dsdl.generate_package(root_namespace_directory='tests/dsdl/namespaces/sirius_cyber_corp', ... lookup_directories=['tests/public_regulated_data_types/uavcan'], ... output_directory=dsdl_generated_dir) ... _ = pyuavcan.dsdl.generate_package(root_namespace_directory='tests/public_regulated_data_types/uavcan', ... output_directory=dsdl_generated_dir) ... importlib.invalidate_caches() ... import sirius_cyber_corp ... import uavcan.si.sample.volumetric_flow_rate """ # Read the DSDL definitions composite_types = pydsdl.read_namespace(root_namespace_directory=str(root_namespace_directory), lookup_directories=list(map(str, lookup_directories)), allow_unregulated_fixed_port_id=allow_unregulated_fixed_port_id) root_namespace_name, = set(map(lambda x: x.root_namespace, composite_types)) # type: str, # Template primitives filters = { 'pickle': _pickle_object, 'numpy_scalar_type': _numpy_scalar_type, } tests = { 'PaddingField': lambda x: isinstance(x, pydsdl.PaddingField), 'saturated': _test_if_saturated, } # Generate code output_directory = pathlib.Path.cwd() if output_directory is None else output_directory language_context = nunavut.lang.LanguageContext('py', namespace_output_stem='__init__') root_ns = nunavut.build_namespace_tree(types=composite_types, root_namespace_dir=root_namespace_directory, output_dir=str(output_directory), language_context=language_context) generator = nunavut.jinja.Generator(namespace=root_ns, generate_namespace_types=nunavut.YesNoDefault.YES, templates_dir=_TEMPLATE_DIRECTORY, followlinks=True, additional_filters=filters, additional_tests=tests, post_processors=[ nunavut.postprocessors.SetFileMode(_OUTPUT_FILE_PERMISSIONS), nunavut.postprocessors.LimitEmptyLines(2), nunavut.postprocessors.TrimTrailingWhitespace(), ]) generator.generate_all() return GeneratedPackageInfo(path=pathlib.Path(output_directory) / pathlib.Path(root_namespace_name), models=composite_types, name=root_namespace_name)
def test_build_namespace_tree_from_nothing(gen_paths): # type: ignore namespace = build_namespace_tree([], str(gen_paths.dsdl_dir), gen_paths.out_dir, LanguageContext('js')) assert namespace is not None assert namespace.full_name == ''
def _run(args: argparse.Namespace, extra_includes: typing.List[str]) -> int:  # noqa: C901
    '''
    Post command-line setup and parsing logic to execute nunavut library routines based on input.

    :param args: Parsed command-line arguments.
    :param extra_includes: Additional DSDL lookup directories passed to ``pydsdl.read_namespace``.
    :return: Process exit status (0 on success).
    '''
    #
    # nunavut : load module
    #
    import pydsdl

    import nunavut
    import nunavut.jinja
    import nunavut.lang

    def _build_ext_program_postprocessor(program: str, args: argparse.Namespace) \
            -> nunavut.postprocessors.FilePostProcessor:
        # Wrap an external program (plus any --pp-run-program-arg values) as an
        # edit-in-place post-processor.
        subprocess_args = [program]

        if hasattr(args, 'pp_run_program_arg') and args.pp_run_program_arg is not None:
            for program_arg in args.pp_run_program_arg:
                subprocess_args.append(program_arg)

        return nunavut.postprocessors.ExternalProgramEditInPlace(subprocess_args)

    def _build_post_processor_list_from_args(args: argparse.Namespace) \
            -> typing.List[nunavut.postprocessors.PostProcessor]:
        '''
        Return a list of post processors setup based on the provided commandline arguments. This
        list may be empty but the function will not return None.
        '''
        post_processors = []  # type: typing.List[nunavut.postprocessors.PostProcessor]
        if args.pp_trim_trailing_whitespace:
            post_processors.append(nunavut.postprocessors.TrimTrailingWhitespace())
        if hasattr(args, 'pp_max_emptylines') and args.pp_max_emptylines is not None:
            post_processors.append(nunavut.postprocessors.LimitEmptyLines(args.pp_max_emptylines))
        if hasattr(args, 'pp_run_program') and args.pp_run_program is not None:
            post_processors.append(_build_ext_program_postprocessor(args.pp_run_program, args))

        # The file-mode post-processor is always applied last.
        post_processors.append(nunavut.postprocessors.SetFileMode(args.file_mode))

        return post_processors

    #
    # nunavut: language context.
    #
    if args.list_inputs is not None and args.target_language is None and args.output_extension is None:
        # This is a special case where we know we'll never actually use the output extension since
        # we are only listing the input files. All other cases require either an output extension or
        # a valid target language.
        setattr(args, 'output_extension', '.tmp')

    language_context = nunavut.lang.LanguageContext(
        args.target_language,
        args.output_extension,
        args.namespace_output_stem,
        omit_serialization_support_for_target=args.omit_serialization_support)

    #
    # nunavut : parse
    #
    type_map = pydsdl.read_namespace(args.root_namespace, extra_includes)

    root_namespace = nunavut.build_namespace_tree(type_map,
                                                  args.root_namespace,
                                                  args.outdir,
                                                  language_context)

    #
    # nunavut : generate
    #
    generator_args = {
        'generate_namespace_types': (nunavut.YesNoDefault.YES
                                     if args.generate_namespace_types
                                     else nunavut.YesNoDefault.DEFAULT),
        'templates_dir': (pathlib.Path(args.templates) if args.templates is not None else None),
        'trim_blocks': args.trim_blocks,
        'lstrip_blocks': args.lstrip_blocks,
        'post_processors': _build_post_processor_list_from_args(args)
    }

    from nunavut.generators import create_generators
    generator, support_generator = create_generators(root_namespace, **generator_args)

    if args.list_outputs:
        # Dry-run both generators and emit the semicolon-delimited output paths; no files are written.
        for output_path in generator.generate_all(is_dryrun=True):
            sys.stdout.write(str(output_path))
            sys.stdout.write(';')
        for output_path in support_generator.generate_all(is_dryrun=True):
            sys.stdout.write(str(output_path))
            sys.stdout.write(';')
        return 0

    if args.list_inputs:
        # Emit the semicolon-delimited template paths followed by the DSDL source files.
        for input_path in generator.get_templates():
            sys.stdout.write(str(input_path.resolve()))
            sys.stdout.write(';')
        for input_path in support_generator.get_templates():
            sys.stdout.write(str(input_path.resolve()))
            sys.stdout.write(';')
        if generator.generate_namespace_types:
            for output_type, _ in root_namespace.get_all_types():
                sys.stdout.write(str(output_type.source_file_path))
                sys.stdout.write(';')
        else:
            for output_type, _ in root_namespace.get_all_datatypes():
                sys.stdout.write(str(output_type.source_file_path))
                sys.stdout.write(';')
        return 0

    # Serialization support is generated unless explicitly omitted.
    if args.omit_serialization_support is None or not args.omit_serialization_support:
        support_generator.generate_all(is_dryrun=args.dry_run,
                                       allow_overwrite=not args.no_overwrite)

    generator.generate_all(is_dryrun=args.dry_run,
                           allow_overwrite=not args.no_overwrite)

    return 0
def _test_common_namespace(gen_paths): # type: ignore root_namespace_dir = gen_paths.dsdl_dir / pathlib.Path("uavcan") root_namespace = str(root_namespace_dir) return nunavut.build_namespace_tree( pydsdl.read_namespace(root_namespace, ''), root_namespace_dir, gen_paths.out_dir, LanguageContext('js'))