def _add_integration_object(self, filename: Path, file: object, error: RecordFluxError) -> None:
    """Register *file* as the integration data for the package named by *filename*.

    The object is validated via ``IntegrationFile.parse_obj``; validation
    failures are appended to *error* instead of being raised.
    """
    try:
        self._packages[filename.stem] = IntegrationFile.parse_obj(file)
    except ValidationError as validation_error:
        error.extend(
            [
                (
                    f"{validation_error}",
                    Subsystem.PARSER,
                    Severity.ERROR,
                    self._to_location(filename.stem),
                )
            ]
        )
def check_type(self, declaration_type: rty.Type, typify_variable: Callable[[Expr], Expr]) -> RecordFluxError:
    """Type-check this renaming declaration against *declaration_type*.

    The renamed expression must be a message field selection, and a
    refinement must exist that makes the renaming legal.
    """
    self.type_ = declaration_type

    substituted = self.expression.substituted(typify_variable)
    assert isinstance(substituted, Selected)
    self.expression = substituted

    prefix_error = self.expression.prefix.check_type_instance(rty.Message)
    if prefix_error.errors:
        return prefix_error

    assert isinstance(self.expression.prefix.type_, rty.Message)

    error = RecordFluxError()
    # The renaming is legal iff some refinement of the prefix message targets
    # the selected field with a compatible SDU type.
    legal = any(
        ID(refinement.field) == self.expression.selector
        and refinement.sdu.is_compatible(declaration_type)
        for refinement in self.expression.prefix.type_.refinements
    )
    if not legal:
        error.extend(
            [
                (
                    f'invalid renaming to "{self.identifier}"',
                    Subsystem.MODEL,
                    Severity.ERROR,
                    self.location,
                ),
                (
                    f'refinement for message "{self.expression.prefix.type_.identifier}"'
                    " would make operation legal",
                    Subsystem.MODEL,
                    Severity.INFO,
                    self.location,
                ),
            ],
        )
    return error + self.expression.check_type(rty.OPAQUE)
def check_type(
    actual: Type,
    expected: ty.Union[Type, ty.Tuple[Type, ...]],
    location: ty.Optional[Location],
    description: str,
) -> RecordFluxError:
    """Return an error unless *actual* is compatible with one of *expected*."""
    assert expected, "empty expected types"

    if actual == Undefined():
        return _undefined_type(location, description)

    error = RecordFluxError()
    expected_types = list(expected) if isinstance(expected, tuple) else [expected]
    undefined_involved = Undefined() in [actual, expected]
    compatible = any(actual.is_compatible(t) for t in expected_types)
    if not undefined_involved and not compatible:
        desc = (
            " or ".join(str(t) for t in expected_types)
            if isinstance(expected, tuple)
            else str(expected)
        )
        error.extend(
            [
                (f"expected {desc}", Subsystem.MODEL, Severity.ERROR, location),
                (f"found {actual}", Subsystem.MODEL, Severity.INFO, location),
            ],
        )
    return error
def _validate_states(package: str, integration: SessionIntegration, session: Session, error: RecordFluxError) -> None:
    """Validate the per-state buffer sizes of *integration* against *session*.

    Each configured state must exist in the session, and each variable
    listed for a state must be declared in that state. All mismatches are
    appended to *error*.
    """
    if integration.buffer_size.local_ is None:
        return
    for state_name, state_entry in integration.buffer_size.local_.items():
        # Find the session state with a matching simple name, if any.
        state = next(
            (s for s in session.states if str(s.identifier.name) == state_name),
            None,
        )
        if state is None:
            error.extend([(
                (f'unknown state "{state_name}" in session '
                 f'"{session.identifier.name}"'),
                Subsystem.PARSER,
                Severity.ERROR,
                Integration._to_location(package),
            )])
            # Fix: previously this returned, silently skipping validation of
            # all states after the first unknown one; continue so every
            # problem is reported in a single pass.
            continue
        state_declaration_vars = [str(x.name) for x in state.declarations]
        for var_name in state_entry:
            if var_name not in state_declaration_vars:
                error.extend([(
                    (f'unknown variable "{var_name}" in state '
                     f'"{state_name}" of session "{session.identifier.name}"'),
                    Subsystem.PARSER,
                    Severity.ERROR,
                    Integration._to_location(package),
                )])
def _undefined_type(location: ty.Optional[Location], description: str = "") -> RecordFluxError:
    """Build an error reporting an undefined type at *location*.

    If *description* is non-empty it is appended to the message.
    """
    suffix = f" {description}" if description else ""
    error = RecordFluxError()
    error.extend(
        [
            (
                f"undefined{suffix}",
                Subsystem.MODEL,
                Severity.ERROR,
                location,
            )
        ],
    )
    return error
def _check_duplicates(self) -> RecordFluxError:
    """Detect name conflicts among types, refinements and sessions."""
    error = RecordFluxError()
    seen_types: Dict[ID, type_.Type] = {}
    seen_sessions: Dict[ID, session.Session] = {}

    for declaration in self._types:
        if declaration.identifier in seen_types:
            # Refinements get a dedicated message naming PDU and SDU.
            if isinstance(declaration, message.Refinement):
                conflict_msg = (
                    f'conflicting refinement of "{declaration.pdu.identifier}" with'
                    f' "{declaration.sdu.identifier}"'
                )
                previous_msg = "previous occurrence of refinement"
            else:
                conflict_msg = f'name conflict for type "{declaration.identifier}"'
                previous_msg = f'previous occurrence of "{declaration.identifier}"'
            error.extend(
                [
                    (conflict_msg, Subsystem.MODEL, Severity.ERROR, declaration.location),
                    (
                        previous_msg,
                        Subsystem.MODEL,
                        Severity.INFO,
                        seen_types[declaration.identifier].location,
                    ),
                ],
            )
        seen_types[declaration.identifier] = declaration

    for sess in self._sessions:
        if sess.identifier in seen_types or sess.identifier in seen_sessions:
            previous_location = (
                seen_types[sess.identifier].location
                if sess.identifier in seen_types
                else seen_sessions[sess.identifier].location
            )
            error.extend(
                [
                    (
                        f'name conflict for session "{sess.identifier}"',
                        Subsystem.MODEL,
                        Severity.ERROR,
                        sess.location,
                    ),
                    (
                        f'previous occurrence of "{sess.identifier}"',
                        Subsystem.MODEL,
                        Severity.INFO,
                        previous_location,
                    ),
                ],
            )
        seen_sessions[sess.identifier] = sess

    return error
def create_message(message: MessageSpec, types: Mapping[ID, Type]) -> Message:
    """Build a proven ``Message`` model from the parsed specification *message*.

    Resolves component type names against *types*, constructs the link
    structure between fields (including the implicit INITIAL and FINAL
    nodes), and accumulates structural errors which are passed into the
    resulting message for later reporting.
    """
    components = list(message.components)
    if components and components[0].name:
        # The spec starts with a named field, so the implicit initial node
        # is missing; prepend an unnamed component to represent it.
        components.insert(0, Component())
    field_types: Dict[Field, Type] = {}
    error = RecordFluxError()
    for component in components:
        if component.name and component.type_name:
            type_name = qualified_type_name(component.type_name, message.package)
            if type_name not in types:
                # NOTE(review): unknown component types are skipped silently
                # here — presumably reported by an earlier check; confirm.
                continue
            field_types[Field(component.name)] = types[type_name]
    structure: List[Link] = []
    for i, component in enumerate(components):
        if not component.name:
            # A "first" aspect on a transition out of an unnamed (initial)
            # component is invalid.
            error.extend([(
                "invalid first expression",
                Subsystem.PARSER,
                Severity.ERROR,
                then.first.location,
            ) for then in component.thens if then.first != UNDEFINED])
        source_node = Field(component.name) if component.name else INITIAL
        if not component.thens:
            # No explicit transitions: link to the next component in textual
            # order, or to FINAL if this is the last one.
            name = components[i + 1].name if i + 1 < len(components) else None
            target_node = Field(name) if name else FINAL
            structure.append(Link(source_node, target_node))
        for then in component.thens:
            target_node = Field(then.name) if then.name else FINAL
            if then.name and target_node not in field_types.keys():
                error.append(
                    f'undefined field "{then.name}"',
                    Subsystem.PARSER,
                    Severity.ERROR,
                    then.name.location if then.name else None,
                )
                continue
            structure.append(
                Link(source_node, target_node, then.condition, then.length,
                     then.first, then.location))
    # Accumulated errors travel inside the unproven message; merged().proven()
    # resolves derivations and runs verification.
    return (UnprovenMessage(message.identifier, structure, field_types,
                            message.location, error).merged().proven())
def _validate_globals(package: str, integration: SessionIntegration, session: Session, error: RecordFluxError) -> None:
    """Check that every global buffer-size variable of *integration* is
    declared in *session*, appending mismatches to *error*.
    """
    if integration.buffer_size.global_ is None:
        return
    declared_names = [str(declaration.name) for declaration in session.declarations.keys()]
    for variable in integration.buffer_size.global_.keys():
        if variable in declared_names:
            continue
        error.extend([(
            (f'unknown global variable "{variable}" '
             f'in session "{session.identifier.name}"'),
            Subsystem.PARSER,
            Severity.ERROR,
            Integration._to_location(package),
        )])
def __evaluate_types(self, spec: Specification, error: RecordFluxError) -> None:
    """Convert the parsed type declarations of *spec* into model types.

    Each type name is fully qualified with its package, duplicates are
    reported, and all conversion errors are accumulated in *error*
    instead of being raised.
    """
    for t in spec.package.types:
        # Qualify the name with the package while keeping the original
        # source location of the identifier.
        t.identifier = ID(f"{spec.package.identifier}.{t.name}", t.identifier.location)
        if t.identifier in self.__types:
            error.append(
                f'duplicate type "{t.identifier}"',
                Subsystem.PARSER,
                Severity.ERROR,
                t.location,
            )
            error.append(
                f'previous occurrence of "{t.identifier}"',
                Subsystem.PARSER,
                Severity.INFO,
                self.__types[t.identifier].location,
            )
            continue
        new_type: Type
        try:
            # NOTE(review): the order of these isinstance checks may matter
            # if the spec classes are related by inheritance — do not
            # reorder without checking the class hierarchy.
            if isinstance(t, Scalar):
                new_type = t
            elif isinstance(t, ArraySpec):
                new_type = create_array(t, self.__types)
            elif isinstance(t, MessageSpec):
                new_type = create_message(t, self.__types)
            elif isinstance(t, DerivationSpec):
                new_type = create_derived_message(t, self.__types)
            elif isinstance(t, RefinementSpec):
                new_type = create_refinement(t, self.__types)
            else:
                raise NotImplementedError(
                    f'unsupported type "{type(t).__name__}"')
            self.__types[t.identifier] = new_type
            # Carry over errors produced while constructing the new type.
            error.extend(new_type.error)
        except RecordFluxError as e:
            error.extend(e)
def create_model(self) -> Model:
    """Evaluate all parsed specifications and build the combined model.

    Accumulated errors are raised as a single RecordFluxError via
    ``propagate`` before the model is returned.
    """
    error = RecordFluxError()
    for spec in self.__specifications:
        package = spec.package.identifier
        if package in self.__evaluated_specifications:
            continue
        self.__evaluated_specifications.add(package)
        try:
            self.__evaluate_specification(spec)
        except RecordFluxError as evaluation_error:
            error.extend(evaluation_error)
    try:
        result = Model(list(self.__types.values()))
    except RecordFluxError as model_error:
        error.extend(model_error)
    # propagate() raises when any error was collected, so `result` is
    # always bound when the return statement is reached.
    error.propagate()
    return result
def __parse(self, specfile: Path, transitions: List[Tuple[ID, ID]] = None) -> None:
    """Parse *specfile* and, recursively, every specification it includes.

    *transitions* records the chain of (package, included item) pairs on
    the current include path and is used to detect dependency cycles.

    NOTE(review): the annotation should arguably be
    Optional[List[Tuple[ID, ID]]] since the default is None — confirm
    Optional is imported at module level before changing.
    """
    error = RecordFluxError()
    log.info("Parsing %s", specfile)
    if not transitions:
        transitions = []
    with open(specfile, "r") as filehandle:
        push_source(specfile)
        try:
            for specification in grammar.unit().parseFile(filehandle):
                check_naming(error, specification.package, specfile.name)
                # Prepend, presumably so that included (dependency) packages
                # end up before their users — confirm against create_model.
                self.__specifications.appendleft(specification)
                for item in specification.context.items:
                    transition = (specification.package.identifier, item)
                    if transition in transitions:
                        # Cycle detected: report the entire include chain.
                        error.append(
                            f'dependency cycle when including "{transitions[0][1]}"',
                            Subsystem.PARSER,
                            Severity.ERROR,
                            transitions[0][1].location,
                        )
                        error.extend([(
                            f'when including "{i}"',
                            Subsystem.PARSER,
                            Severity.INFO,
                            i.location,
                        ) for _, i in transitions[1:]])
                        continue
                    transitions.append(transition)
                    # Included specifications are expected next to the
                    # including file, named after the package in lowercase.
                    self.__parse(
                        specfile.parent / f"{str(item).lower()}.rflx",
                        transitions)
        except (ParseException, ParseFatalException) as e:
            error.append(
                e.msg,
                Subsystem.PARSER,
                Severity.ERROR,
                parser_location(e.loc, e.loc, e.pstr, specfile),
            )
        finally:
            pop_source()
    error.propagate()
def assert_error(filename: pathlib.Path, expected: Sequence[str]) -> None:
    """Assert that parsing *filename* fails with exactly the *expected* error lines."""
    parser = specification.Parser()
    with pytest.raises(RecordFluxError) as excinfo:
        collected = RecordFluxError()
        try:
            parser.parse(filename)
        except RecordFluxError as parse_error:
            collected.extend(parse_error)
        try:
            parser.create_model()
        except RecordFluxError as model_error:
            collected.extend(model_error)
        collected.propagate()
    assert str(excinfo.value).split("\n") == expected
def _append(
    error: RecordFluxError,
    message: str,
    row: int,
    col: int,
    spec_file: Path,
    check_type: Check = None,
) -> None:
    """Append a style error for *spec_file* at (row, col) to *error*.

    The check identifier is appended to the message when given.

    NOTE(review): annotation should arguably be Optional[Check] since the
    default is None — confirm Optional is imported before changing.
    """
    suffix = f" [{check_type.value}]" if check_type else ""
    error.extend(
        [
            (
                message + suffix,
                Subsystem.STYLE,
                Severity.ERROR,
                Location((row, col), spec_file),
            )
        ]
    )
def validate(self, model: Model, error: RecordFluxError) -> None:
    """Validate all loaded integration files against the sessions of *model*.

    Every session referenced by an integration file must exist in the
    model; its global variables and per-state variables are then checked.
    All problems are appended to *error*.
    """
    for package, integration_file in self._packages.items():
        for session_name, integration in integration_file.session.items():
            matching_sessions = [
                s for s in model.sessions
                if package == str(s.package).lower()
                and str(s.identifier.name) == session_name
            ]
            if not matching_sessions:
                error.extend([(
                    f'unknown session "{session_name}"',
                    Subsystem.PARSER,
                    Severity.ERROR,
                    Integration._to_location(package),
                )])
                # Fix: previously this returned, aborting validation of all
                # remaining sessions and packages after the first unknown
                # session; continue so every problem is reported in one pass.
                continue
            assert len(matching_sessions) == 1
            session = matching_sessions[0]
            self._validate_globals(package, integration, session, error)
            self._validate_states(package, integration, session, error)
def parse(files: List) -> Model:
    """Parse each specification file in *files* into a single model.

    Missing files and parse failures are collected and raised together
    via ``propagate``.
    """
    parser = Parser()
    error = RecordFluxError()
    for f in files:
        if Path(f).is_file():
            try:
                parser.parse(Path(f))
            except RecordFluxError as parse_error:
                error.extend(parse_error)
        else:
            error.append(f'file not found: "{f}"', Subsystem.CLI, Severity.ERROR)
    try:
        model = parser.create_model()
    except RecordFluxError as model_error:
        error.extend(model_error)
    # propagate() raises when any error was collected, so `model` is always
    # bound when the return statement is reached.
    error.propagate()
    return model
def load_integration_file(self, spec_file: Path, error: RecordFluxError) -> None:
    """Load the ``.rfi`` integration file corresponding to *spec_file*, if any.

    The file is searched next to the specification, or in the configured
    integration files directory. YAML errors are appended to *error*.
    """
    if self._integration_files_dir is None:
        integration_file = spec_file.with_suffix(".rfi")
    else:
        integration_file = self._integration_files_dir / (spec_file.stem + ".rfi")
    if not integration_file.exists():
        return
    yaml = YAML()
    try:
        content = yaml.load(integration_file)
    except MarkedYAMLError as yaml_error:
        mark = yaml_error.problem_mark
        # YAML marks are 0-based; locations are reported 1-based.
        location = Location(
            start=(0, 0) if mark is None else (mark.line + 1, mark.column + 1),
            source=integration_file,
        )
        error.extend([(str(yaml_error), Subsystem.PARSER, Severity.ERROR, location)])
        return
    self._add_integration_object(integration_file, content, error)
def parse(
    files: Sequence[Path],
    skip_verification: bool = False,
    workers: int = 1,
    integration_files_dir: Optional[Path] = None,
) -> Tuple[Model, Integration]:
    """Parse *files* and return the resulting model and integration data.

    Missing files and parse/model errors are collected and raised
    together via ``propagate``.
    """
    parser = Parser(
        skip_verification,
        cached=True,
        workers=workers,
        integration_files_dir=integration_files_dir,
    )
    error = RecordFluxError()
    present_files = []
    for f in files:
        if f.is_file():
            present_files.append(Path(f))
        else:
            error.extend([(f'file not found: "{f}"', Subsystem.CLI,
                           Severity.ERROR, None)])
    try:
        parser.parse(*present_files)
    except RecordFluxError as parse_error:
        error.extend(parse_error)
    try:
        model = parser.create_model()
    except RecordFluxError as model_error:
        error.extend(model_error)
    # propagate() raises when any error was collected, so `model` is always
    # bound when the return statement is reached.
    error.propagate()
    return model, parser.get_integration()
def check_type_instance(
    actual: Type,
    expected: ty.Union[ty.Type[Type], ty.Tuple[ty.Type[Type], ...]],
    location: ty.Optional[Location],
    description: str = "",
) -> RecordFluxError:
    """Return an error unless *actual* is an instance of *expected* or Any."""
    assert expected, "empty expected types"

    if actual == Undefined():
        return _undefined_type(location, description)

    error = RecordFluxError()
    if isinstance(actual, expected) or actual == Any():
        return error

    if isinstance(expected, tuple):
        desc = " or ".join(e.DESCRIPTIVE_NAME for e in expected)
    else:
        desc = expected.DESCRIPTIVE_NAME
    error.extend(
        [
            (f"expected {desc}", Subsystem.MODEL, Severity.ERROR, location),
            (f"found {actual}", Subsystem.MODEL, Severity.INFO, location),
        ],
    )
    return error
def _check_conflicts(self) -> RecordFluxError:
    """Detect literal conflicts between enumerations and clashes between
    enumeration literals and type names.
    """
    error = RecordFluxError()
    # Compare each unordered pair of enumerations once (i1 < i2), but only
    # when they share a package or either lives in the builtins package.
    for e1, e2 in [
        (e1, e2)
        for i1, e1 in enumerate(self._types)
        for i2, e2 in enumerate(self._types)
        if (
            isinstance(e1, type_.Enumeration)
            and isinstance(e2, type_.Enumeration)
            and i1 < i2
            and (
                e1.package == e2.package
                or e1.package == const.BUILTINS_PACKAGE
                or e2.package == const.BUILTINS_PACKAGE
            )
        )
    ]:
        identical_literals = set(e2.literals) & set(e1.literals)

        if identical_literals:
            literals_message = ", ".join([f"{l}" for l in sorted(identical_literals)])
            error.extend(
                [
                    (
                        f"conflicting literals: {literals_message}",
                        Subsystem.MODEL,
                        Severity.ERROR,
                        e2.location,
                    ),
                    # One INFO entry per conflicting literal, pointing at its
                    # previous occurrence.
                    *[
                        (
                            f'previous occurrence of "{l}"',
                            Subsystem.MODEL,
                            Severity.INFO,
                            l.location,
                        )
                        for l in sorted(identical_literals)
                    ],
                ],
            )

    # Fully qualified literal names of all enumerations.
    literals = [
        ID(t.package * l, location=l.location)
        for t in self._types
        if isinstance(t, type_.Enumeration)
        for l in t.literals
    ]
    # A literal clashes with a type when both are visible in the same
    # package (or the type is a builtin) and their simple names match.
    name_conflicts = [
        (l, t)
        for l in literals
        for t in self._types
        if (l.parent == t.package or type_.is_builtin_type(t.identifier))
        and l.name == t.identifier.name
    ]
    for literal, conflicting_type in name_conflicts:
        error.extend(
            [
                (
                    f'literal "{literal.name}" conflicts with type declaration',
                    Subsystem.MODEL,
                    Severity.ERROR,
                    literal.location,
                ),
                (
                    f'conflicting type "{conflicting_type.identifier}"',
                    Subsystem.MODEL,
                    Severity.INFO,
                    conflicting_type.location,
                ),
            ],
        )
    return error