def test_load_integration_file(
    tmp_path: Path, content: str, error_msg: Sequence[str], line: int, column: int
) -> None:
    """Check that a malformed .rfi file is rejected with the expected location info."""
    rfi_file = tmp_path / "test.rfi"
    rfi_file.write_text(content)
    # Assemble the full expected error message in one pass instead of appending.
    pattern = "".join(
        [
            rf"^{rfi_file}:{line}:{column}: parser: error: ",
            *error_msg,
            rf'.*in "{rfi_file}", line [0-9]+, column [0-9]+.*',
            "$",
        ]
    )
    integration = Integration()
    error = RecordFluxError()
    with pytest.raises(RecordFluxError, match=re.compile(pattern, re.DOTALL)):
        integration.load_integration_file(rfi_file, error)
        error.propagate()
def main() -> int:
    """Regenerate code for all specifications and deduplicate shared generated files.

    Any file in a feature test's "generated" directory that is byte-identical to
    the shared copy is replaced by a relative symlink into shared/generated.
    """
    parser = Parser()
    parser.parse(*SPECIFICATION_FILES)
    models = [parser.create_model(), *MODELS]
    for model in models:
        generator = Generator(
            "RFLX",
            reproducible=True,
            ignore_unsupported_checksum=True,
        )
        generator.generate(model, Integration(), OUTPUT_DIRECTORY)
    generate(SHARED_DIRECTORY)
    shared = {path.name: path for path in (SHARED_DIRECTORY / "generated").iterdir()}
    for feature_test in FEATURE_TESTS:
        generate(feature_test)
        for generated in (feature_test / "generated").iterdir():
            is_duplicate = generated.name in shared and filecmp.cmp(
                generated, shared[generated.name]
            )
            if is_duplicate:
                generated.unlink()
                generated.symlink_to(f"../../shared/generated/{generated.name}")
    return 0
def test_refinement_with_imported_enum_literal(tmp_path: Path) -> None:
    """A refinement condition may reference an enum literal from another package."""
    parser = Parser()
    parser.parse_string(
        """
        package Numbers is
           type Protocol is (PROTO_X, PROTO_Y) with Size => 8;
        end Numbers;
        """
    )
    parser.parse_string(
        """
        with Numbers;
        package Proto is
           type Packet is
              message
                 Protocol : Numbers::Protocol
                    then Data
                       with Size => 128;
                 Data : Opaque;
              end message;
        end Proto;
        """
    )
    parser.parse_string(
        """
        with Proto;
        with Numbers;
        package In_Proto is
           type X is null message;
           for Proto::Packet use (Data => X)
              if Protocol = Numbers::PROTO_X;
        end In_Proto;
        """
    )
    utils.assert_compilable_code(parser.create_model(), Integration(), tmp_path)
def test_rfi_add_integration(rfi_content: str, match_error: str) -> None:
    """Invalid integration-file content is rejected with a pydantic validation error."""
    # pydantic messages end with the type of the error in parentheses.
    expected = re.compile(
        "^test.rfi:0:0: parser: error: 1 validation error for "
        rf"IntegrationFile.*{match_error} \([^()]*\)$",
        re.DOTALL,
    )
    parsed_content = YAML().load(rfi_content)
    integration = Integration()
    error = RecordFluxError()
    with pytest.raises(RecordFluxError, match=expected):
        # pylint: disable = protected-access
        integration._add_integration_object(Path("test.rfi"), parsed_content, error)
        error.propagate()
def test_load_integration_path(tmp_path: Path) -> None:
    """The .rfi file is looked up in the configured directory, not next to the spec."""
    rfi_dir = tmp_path / "sub"
    rfi_dir.mkdir()
    (rfi_dir / "test.rfi").write_text("{ Session: { Session : { Buffer_Size : 0 }}}")
    expected = re.compile(
        r"test.rfi:0:0: parser: error: 1 validation error for IntegrationFile.*"
        r"value is not a valid dict \(type=type_error.dict\)",
        re.DOTALL,
    )
    error = RecordFluxError()
    integration = Integration(integration_files_dir=rfi_dir)
    with pytest.raises(RecordFluxError, match=expected):
        integration.load_integration_file(tmp_path / "test.rflx", error)
        error.propagate()
def assert_compilable_code_string(
    specification: str, tmp_path: pathlib.Path, prefix: Union[str, None] = None
) -> None:
    """Parse *specification* and assert that code generated from it compiles.

    :param specification: RecordFlux specification text to parse.
    :param tmp_path: directory in which code is generated and compiled.
    :param prefix: optional package prefix passed through to code generation.
    """
    # `prefix: str = None` was an implicit Optional, disallowed by PEP 484;
    # annotate the None default explicitly.
    parser = Parser()
    parser.parse_string(specification)
    assert_compilable_code(parser.create_model(), Integration(), tmp_path, prefix=prefix)
def assert_provable_code_string(
    specification: str,
    tmp_path: pathlib.Path,
    prefix: Union[str, None] = None,
    units: Union[Sequence[str], None] = None,
) -> None:
    """Parse *specification* and assert that code generated from it can be proven.

    :param specification: RecordFlux specification text to parse.
    :param tmp_path: directory in which code is generated and proven.
    :param prefix: optional package prefix passed through to code generation.
    :param units: optional subset of units to prove; None means all.
    """
    # `prefix: str = None` and `units: Sequence[str] = None` were implicit
    # Optionals, disallowed by PEP 484; annotate the None defaults explicitly.
    parser = Parser()
    parser.parse_string(specification)
    assert_provable_code(parser.create_model(), Integration(), tmp_path, prefix=prefix, units=units)
def assert_compilable_code_specs(
    spec_files: Iterable[Union[str, pathlib.Path]],
    tmp_path: pathlib.Path,
    prefix: Union[str, None] = None,
) -> None:
    """Parse all *spec_files* into one model and assert the generated code compiles.

    :param spec_files: paths of RecordFlux specification files to parse.
    :param tmp_path: directory in which code is generated and compiled.
    :param prefix: optional package prefix passed through to code generation.
    """
    # `prefix: str = None` was an implicit Optional, disallowed by PEP 484;
    # annotate the None default explicitly.
    parser = Parser()
    for spec_file in spec_files:
        parser.parse(pathlib.Path(spec_file))
    assert_compilable_code(parser.create_model(), Integration(), tmp_path, prefix=prefix)
def test_sequence_with_imported_element_type_scalar(tmp_path: Path) -> None:
    """A sequence type may use a scalar element type imported from another package."""
    parser = Parser()
    parser.parse_string(
        """
        package Test is
           type T is mod 256;
        end Test;
        """
    )
    parser.parse_string(
        """
        with Test;
        package Sequence_Test is
           type T is sequence of Test::T;
        end Sequence_Test;
        """
    )
    utils.assert_compilable_code(parser.create_model(), Integration(), tmp_path)
def test_sequence_with_imported_element_type_message(tmp_path: Path) -> None:
    """A sequence type may use a message element type imported from another package."""
    parser = Parser()
    parser.parse_string(
        """
        package Test is
           type M is
              message
                 null
                    then A
                       with Size => 8;
                 A : Opaque;
              end message;
        end Test;
        """
    )
    parser.parse_string(
        """
        with Test;
        package Sequence_Test is
           type T is sequence of Test::M;
        end Sequence_Test;
        """
    )
    utils.assert_compilable_code(parser.create_model(), Integration(), tmp_path)
def test_rfi_get_size() -> None:
    """Buffer-size lookup precedence: local (per-state) override beats the global
    (per-variable) override, which beats the session default.

    The original test asserted ``get_size(ID("P::S"), ID("X"), ID("S")) == 1024``
    twice in a row; the duplicate has been removed.
    """
    integration = Integration()
    session_object = {
        "Session": {
            "S": {
                "Buffer_Size": {
                    "Default": 1024,
                    "Global": {
                        "Y": 2048,
                        "Z": 512,
                    },
                    "Local": {
                        "S": {
                            "Y": 8192,
                        }
                    },
                }
            }
        }
    }
    error = RecordFluxError()
    # pylint: disable = protected-access
    integration._add_integration_object(Path("p.rfi"), session_object, error)
    error.propagate()
    # No variable given: the session default applies.
    assert integration.get_size(ID("P::S"), None, None) == 1024
    # Variable X has no override, so the session default applies in any state.
    assert integration.get_size(ID("P::S"), ID("X"), ID("S")) == 1024
    assert integration.get_size(ID("P::S"), ID("X"), None) == 1024
    # Unknown session S2: presumably the implementation-wide default — confirm.
    assert integration.get_size(ID("P::S2"), ID("X"), None) == 4096
    # Y has a global override, which is in turn overridden locally in state S.
    assert integration.get_size(ID("P::S"), ID("Y"), None) == 2048
    assert integration.get_size(ID("P::S"), ID("Y"), ID("S")) == 8192
    # Z has only a global override; it applies with or without a state.
    assert integration.get_size(ID("P::S"), ID("Z"), None) == 512
    assert integration.get_size(ID("P::S"), ID("Z"), ID("S")) == 512
def test_code_verification(tmp_path_factory: TempPathFactory, model: Model) -> None:
    """Assert that the code generated for *model* can be proven."""
    work_dir = tmp_path_factory.mktemp("code_verification")
    utils.assert_provable_code(model, Integration(), work_dir)