def __init__(
    self, identifier: Union[str, Sequence[str], ID], location: Union[Location, None] = None
) -> None:
    """
    Create an identifier from a string, a sequence of parts, or another ID.

    A string is split into parts on "." or "::". When copying another ID, its
    location is taken over unless an explicit *location* is given. Invalid
    identifiers (empty, containing empty parts, or parts containing " ", "."
    or ":") cause a fatal error via fatal_fail, which raises immediately.
    """
    self._parts: Sequence[str]
    self.location = location

    # NOTE(review): order preserved from the original — an ID instance must
    # not be a Sequence subclass, or the ID branch below would be unreachable.
    if isinstance(identifier, str):
        self._parts = re.split(r"\.|::", identifier)
    elif isinstance(identifier, Sequence):
        self._parts = identifier
    elif isinstance(identifier, ID):
        self._parts = list(identifier.parts)
        self.location = location or identifier.location
    else:
        assert False, f'unexpected identifier type "{type(identifier).__name__}"'

    # fatal_fail raises directly, so no error accumulator is needed here
    # (the original created a RecordFluxError that was never appended to).
    if not self._parts:
        fatal_fail("empty identifier", Subsystem.ID, Severity.ERROR, location)
    elif "" in self._parts:
        fatal_fail(f'empty part in identifier "{self}"', Subsystem.ID, Severity.ERROR, location)
    else:
        for c in [" ", ".", ":"]:
            if any(c in part for part in self._parts):
                fatal_fail(
                    f'"{c}" in identifier parts of "{self}"',
                    Subsystem.ID,
                    Severity.ERROR,
                    location,
                )
def test_rfi_get_size() -> None:
    """Check buffer-size lookup precedence: local > global > session default."""
    integration = Integration()
    session_object = {
        "Session": {
            "S": {
                "Buffer_Size": {
                    "Default": 1024,
                    "Global": {
                        "Y": 2048,
                        "Z": 512,
                    },
                    "Local": {
                        "S": {
                            "Y": 8192,
                        }
                    },
                }
            }
        }
    }
    error = RecordFluxError()
    # pylint: disable = protected-access
    integration._add_integration_object(Path("p.rfi"), session_object, error)
    error.propagate()
    # No variable given: the session default applies.
    assert integration.get_size(ID("P::S"), None, None) == 1024
    # Unknown variable X: session default, with or without a state.
    # (The original asserted the ID("X"), ID("S") case twice — copy-paste
    # duplication; one copy removed.)
    assert integration.get_size(ID("P::S"), ID("X"), ID("S")) == 1024
    assert integration.get_size(ID("P::S"), ID("X"), None) == 1024
    # Unknown session: presumably the built-in default size — TODO confirm.
    assert integration.get_size(ID("P::S2"), ID("X"), None) == 4096
    # Global entry applies regardless of state...
    assert integration.get_size(ID("P::S"), ID("Y"), None) == 2048
    # ...unless a local entry for that state overrides it.
    assert integration.get_size(ID("P::S"), ID("Y"), ID("S")) == 8192
    # Z has only a global entry, so it wins in any state.
    assert integration.get_size(ID("P::S"), ID("Z"), None) == 512
    assert integration.get_size(ID("P::S"), ID("Z"), ID("S")) == 512
def parse(
    files: Sequence[Path],
    skip_verification: bool = False,
    workers: int = 1,
    integration_files_dir: Optional[Path] = None,
) -> Tuple[Model, Integration]:
    """
    Parse the given specification files and return the model and integration.

    Missing files, parser errors and model-creation errors are all collected
    and raised together as a single RecordFluxError.
    """
    parser = Parser(
        skip_verification, cached=True, workers=workers, integration_files_dir=integration_files_dir
    )

    error = RecordFluxError()
    present_files = []

    for f in files:
        if not f.is_file():
            error.extend([(f'file not found: "{f}"', Subsystem.CLI, Severity.ERROR, None)])
            continue
        # f is already a Path (files: Sequence[Path]); no need to re-wrap it.
        present_files.append(f)

    try:
        parser.parse(*present_files)
    except RecordFluxError as e:
        error.extend(e)

    try:
        model = parser.create_model()
    except RecordFluxError as e:
        error.extend(e)

    # propagate() raises if anything was collected, so `model` is always
    # bound when the return statement is reached.
    error.propagate()
    return model, parser.get_integration()
def parse_unproven_session(string: str) -> model.UnprovenSession:
    """Parse *string* as a session declaration and return the unproven session."""
    analysis_unit = lang.AnalysisContext().get_from_buffer(
        "<stdin>", string, rule=lang.GrammarRule.session_declaration_rule
    )
    parse_error = RecordFluxError()
    if diagnostics_to_error(analysis_unit.diagnostics, parse_error, STDIN):
        parse_error.propagate()
    root = analysis_unit.root
    assert isinstance(root, lang.SessionDecl)
    return create_unproven_session(root, ID("Package"), Path("<stdin>"))
def parse(
    data: str,
    rule: str,
) -> Tuple[lang.RFLXNode, pathlib.Path]:
    """Parse *data* with the given grammar rule; return the AST root and source path."""
    analysis_unit = lang.AnalysisContext().get_from_buffer("<stdin>", data, rule=rule)
    parse_error = RecordFluxError()
    if diagnostics_to_error(analysis_unit.diagnostics, parse_error, STDIN):
        parse_error.propagate()
    root = analysis_unit.root
    assert isinstance(root, lang.RFLXNode)
    return (root, STDIN)
def parse_string(self, string: str) -> None:
    """Parse specifications from *string*, collecting naming and syntax errors."""
    collected = RecordFluxError()
    try:
        for spec in grammar.unit().parseString(string):
            self.__specifications.appendleft(spec)
            check_naming(collected, spec.package)
    except (ParseException, ParseFatalException) as e:
        # Convert the pyparsing exception into a located RecordFlux error.
        collected.append(
            e.msg,
            Subsystem.PARSER,
            Severity.ERROR,
            parser_location(e.loc, e.loc, e.pstr),
        )
    collected.propagate()
def test_load_integration_file(
    tmp_path: Path, content: str, error_msg: Sequence[str], line: int, column: int
) -> None:
    """Loading a malformed .rfi file must raise an error matching the expected message."""
    test_rfi = tmp_path / "test.rfi"
    test_rfi.write_text(content)
    integration = Integration()
    error = RecordFluxError()
    # Assemble the expected error pattern from its pieces, then anchor it.
    pieces = [rf"^{test_rfi}:{line}:{column}: parser: error: "]
    pieces.extend(error_msg)
    pieces.append(rf'.*in "{test_rfi}", line [0-9]+, column [0-9]+.*')
    pieces.append("$")
    compiled_regex = re.compile("".join(pieces), re.DOTALL)
    with pytest.raises(RecordFluxError, match=compiled_regex):
        integration.load_integration_file(test_rfi, error)
        error.propagate()
def test_rfi_add_integration(rfi_content: str, match_error: str) -> None:
    """Invalid integration objects must be rejected with a pydantic validation error."""
    # pydantic messages end with the type of the error in parentheses.
    regex = re.compile(
        (
            "^test.rfi:0:0: parser: error: 1 validation error for "
            rf"IntegrationFile.*{match_error} \([^()]*\)$"
        ),
        re.DOTALL,
    )
    content = YAML().load(rfi_content)
    error = RecordFluxError()
    integration = Integration()
    with pytest.raises(RecordFluxError, match=regex):
        # pylint: disable = protected-access
        integration._add_integration_object(Path("test.rfi"), content, error)
        error.propagate()
def test_load_integration_path(tmp_path: Path) -> None:
    """Integration files are looked up in integration_files_dir, not next to the spec."""
    subfolder = tmp_path / "sub"
    subfolder.mkdir()
    test_rfi = subfolder / "test.rfi"
    test_rfi.write_text("{ Session: { Session : { Buffer_Size : 0 }}}")
    integration = Integration(integration_files_dir=subfolder)
    error = RecordFluxError()
    pattern = (
        r"test.rfi:0:0: parser: error: 1 validation error for IntegrationFile.*"
        r"value is not a valid dict \(type=type_error.dict\)"
    )
    regex = re.compile(pattern, re.DOTALL)
    # The .rflx path is in tmp_path, but the .rfi is found via the configured dir.
    with pytest.raises(RecordFluxError, match=regex):
        integration.load_integration_file(tmp_path / "test.rflx", error)
        error.propagate()
def __parse(self, specfile: Path, transitions: List[Tuple[ID, ID]] = None) -> None:
    """
    Parse *specfile* and, recursively, every specification it includes.

    *transitions* records the chain of (package, included item) pairs seen on
    the current include path; it is used to detect dependency cycles. Included
    files are resolved relative to *specfile*'s directory as
    "<lowercased item name>.rflx".
    """
    error = RecordFluxError()
    log.info("Parsing %s", specfile)
    # `if not transitions` also replaces an explicitly passed empty list,
    # which is equivalent here; avoids the mutable-default pitfall.
    if not transitions:
        transitions = []
    with open(specfile, "r") as filehandle:
        # push/pop_source keeps error locations pointing at the right file
        # across the recursive calls; hence the try/finally below.
        push_source(specfile)
        try:
            for specification in grammar.unit().parseFile(filehandle):
                check_naming(error, specification.package, specfile.name)
                self.__specifications.appendleft(specification)
                for item in specification.context.items:
                    transition = (specification.package.identifier, item)
                    if transition in transitions:
                        # Cycle: report the start of the chain as the error,
                        # then each subsequent include as INFO context.
                        error.append(
                            f'dependency cycle when including "{transitions[0][1]}"',
                            Subsystem.PARSER,
                            Severity.ERROR,
                            transitions[0][1].location,
                        )
                        error.extend(
                            [
                                (
                                    f'when including "{i}"',
                                    Subsystem.PARSER,
                                    Severity.INFO,
                                    i.location,
                                )
                                for _, i in transitions[1:]
                            ]
                        )
                        # Skip the recursive parse; keep checking other items.
                        continue
                    transitions.append(transition)
                    self.__parse(specfile.parent / f"{str(item).lower()}.rflx", transitions)
        except (ParseException, ParseFatalException) as e:
            error.append(
                e.msg,
                Subsystem.PARSER,
                Severity.ERROR,
                parser_location(e.loc, e.loc, e.pstr, specfile),
            )
        finally:
            pop_source()
    error.propagate()
def create_model(self) -> Model:
    """Evaluate every not-yet-processed specification and return the combined model."""
    accumulated = RecordFluxError()
    for spec in self.__specifications:
        identifier = spec.package.identifier
        if identifier in self.__evaluated_specifications:
            continue
        self.__evaluated_specifications.add(identifier)
        try:
            self.__evaluate_specification(spec)
        except RecordFluxError as e:
            accumulated.extend(e)
    try:
        result = Model(list(self.__types.values()))
    except RecordFluxError as e:
        accumulated.extend(e)
    # Raises if any error was collected; otherwise `result` is bound.
    accumulated.propagate()
    return result
def create_derived_message(derivation: DerivationSpec, types: Mapping[ID, Type]) -> Message:
    """
    Create a proven message derived from an existing base message.

    Fails fatally if the base type is entirely unknown; reports an error if
    the base exists but is not a message, or is itself a derived message
    (derivation chains are not allowed).
    """
    base_name = qualified_type_name(derivation.base, derivation.package)
    messages = message_types(types)
    error = RecordFluxError()

    # Base name unknown altogether: fail immediately (fail raises).
    if base_name not in types:
        fail(
            f'undefined base message "{base_name}" in derived message',
            Subsystem.PARSER,
            Severity.ERROR,
            derivation.location,
        )

    # Base exists but is not a message type: ERROR plus INFO pointing at the
    # offending type's declaration, then raise.
    if base_name not in messages:
        error.append(
            f'illegal derivation "{derivation.identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            derivation.location,
        )
        error.append(
            f'invalid base message type "{base_name}"',
            Subsystem.PARSER,
            Severity.INFO,
            types[base_name].location,
        )
        error.propagate()

    base = messages[base_name]

    # Deriving from an already-derived message is not permitted.
    if isinstance(base, DerivedMessage):
        error.append(
            f'illegal derivation "{derivation.identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            derivation.location,
        )
        error.append(
            f'invalid base message "{base_name}"', Subsystem.PARSER, Severity.INFO, base.location
        )
        error.propagate()

    # Merge the base's structure into the derived message and prove it.
    return (
        UnprovenDerivedMessage(derivation.identifier, base, location=derivation.location)
        .merged()
        .proven()
    )
def assert_error(filename: pathlib.Path, expected: Sequence[str]) -> None:
    """Parse and model *filename*; assert the combined error lines equal *expected*."""
    parser = specification.Parser()
    with pytest.raises(RecordFluxError) as excinfo:
        collected = RecordFluxError()
        # Run both phases, accumulating errors from each before raising.
        for action in (lambda: parser.parse(filename), parser.create_model):
            try:
                action()
            except RecordFluxError as e:
                collected.extend(e)
        collected.propagate()
    assert str(excinfo.value).split("\n") == expected
def parse(files: List) -> Model:
    """Parse the given specification files into a model, collecting all errors."""
    parser = Parser()
    collected = RecordFluxError()
    for f in files:
        path = Path(f)
        if not path.is_file():
            collected.append(f'file not found: "{f}"', Subsystem.CLI, Severity.ERROR)
            continue
        try:
            parser.parse(path)
        except RecordFluxError as e:
            collected.extend(e)
    try:
        model = parser.create_model()
    except RecordFluxError as e:
        collected.extend(e)
    # Raises if anything was collected; otherwise `model` is bound.
    collected.propagate()
    return model
def create_refinement(refinement: RefinementSpec, types: Mapping[ID, Type]) -> Refinement:
    """
    Create a refinement from its spec, validating PDU, SDU, field and condition.

    Fails fatally on an unknown PDU; reports unknown condition variables, an
    invalid refined field, an unknown SDU, and duplicate refinements.
    """
    messages = message_types(types)

    refinement.pdu = qualified_type_name(refinement.pdu, refinement.package)
    if refinement.pdu not in messages:
        fail(
            f'undefined type "{refinement.pdu}" in refinement',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.location,
        )

    pdu = messages[refinement.pdu]

    error = RecordFluxError()

    # The set of valid literals does not depend on the condition variable, so
    # compute it once here instead of once per variable (it was recomputed
    # inside the loop before, making the check quadratic in practice).
    literals = [
        l for e in pdu.types.values() if isinstance(e, Enumeration) for l in e.literals.keys()
    ] + [
        e.package * l
        for e in types.values()
        if isinstance(e, Enumeration)
        for l in e.literals.keys()
    ]

    for variable in refinement.condition.variables():
        if Field(str(variable.name)) not in pdu.fields and variable.identifier not in literals:
            error.append(
                f'unknown field or literal "{variable.identifier}" in refinement'
                f' condition of "{refinement.pdu}"',
                Subsystem.PARSER,
                Severity.ERROR,
                variable.location,
            )

    if Field(refinement.field) not in pdu.fields:
        error.append(
            f'invalid field "{refinement.field}" in refinement',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.field.location,
        )
    error.propagate()

    refinement.sdu = qualified_type_name(refinement.sdu, refinement.package)
    if refinement.sdu not in messages:
        error.append(
            f'undefined type "{refinement.sdu}" in refinement of "{refinement.pdu}"',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.sdu.location,
        )
        error.propagate()

    sdu = messages[refinement.sdu]

    result = Refinement(
        refinement.package,
        pdu,
        Field(refinement.field),
        sdu,
        refinement.condition,
        refinement.location,
    )
    result.error.extend(error)

    # A refinement equal to an existing one is reported on the result's own
    # error, pointing back at the previous occurrence.
    if result in types.values():
        result.error.append(
            f'duplicate refinement with "{refinement.sdu}"',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.location,
        )
        result.error.append(
            "previous occurrence",
            Subsystem.PARSER,
            Severity.INFO,
            types[result.identifier].location,
        )

    return result
def __evaluate_specification(self, specification: Specification) -> None:
    """Evaluate the types of a single specification, raising any collected errors."""
    log.info("Processing %s", specification.package.identifier)
    collected = RecordFluxError()
    self.__evaluate_types(specification, collected)
    collected.propagate()