def generate(args: argparse.Namespace) -> None:
    """Parse the given specification files and generate code into the output directory.

    Fails (CLI subsystem) on an invalid prefix or a missing output directory.
    """
    # https://github.com/Componolit/Workarounds/issues/28
    if args.prefix == " ":
        args.prefix = ""
    if args.prefix and "" in args.prefix.split("."):
        fail(f'invalid prefix: "{args.prefix}"', Subsystem.CLI)
    if not args.output_directory.is_dir():
        fail(f'directory not found: "{args.output_directory}"', Subsystem.CLI)

    model, integration = parse(
        args.files, args.no_verification, args.workers, args.integration_files_dir
    )

    # Map the CLI debug option onto the generator's Debug enumeration.
    debug_mode = {"built-in": Debug.BUILTIN, "external": Debug.EXTERNAL}.get(
        args.debug, Debug.NONE
    )
    generator = Generator(
        args.prefix,
        workers=args.workers,
        reproducible=os.environ.get("RFLX_REPRODUCIBLE") is not None,
        debug=debug_mode,
        ignore_unsupported_checksum=args.ignore_unsupported_checksum,
    )
    generator.generate(
        model,
        integration,
        args.output_directory,
        library_files=not args.no_library,
        top_level_package=args.prefix == DEFAULT_PREFIX,
    )
def graph(args: argparse.Namespace) -> None:
    """Write one graph file per message of the parsed model into the given directory."""
    out_dir = Path(args.directory)
    if not out_dir.is_dir():
        fail(f'directory not found: "{out_dir}"', Subsystem.GRAPH)
    model = parse(args.files)
    suffix = f".{args.format}"
    for message in model.messages:
        # One output file per message, named after its flattened full name.
        target = (out_dir / flat_name(message.full_name)).with_suffix(suffix)
        Graph(message).write(target, fmt=args.format)
def create_array(array: ArraySpec, types: Mapping[ID, Type]) -> Array:
    """Create an Array type from its specification, resolving the element type.

    Fails (parser subsystem) if the element type is not among the known types.
    """
    # Qualify the element type name with the array's package.
    array.element_type.identifier = ID(
        array.element_type.full_name.replace("__PACKAGE__", str(array.package)),
        array.location,
    )
    try:
        element_type = types[array.element_type.identifier]
    except KeyError:
        fail(
            f'undefined element type "{array.element_type.identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            array.element_type.location,
        )
    return Array(array.identifier, element_type, array.location)
def create_derived_message(derivation: DerivationSpec, types: Mapping[ID, Type]) -> Message:
    """Create a message type derived from an existing base message.

    Reports an error if the base type is unknown, is not a message type, or is
    itself already a derived message (no transitive derivation).
    """
    base_name = qualified_type_name(derivation.base, derivation.package)
    messages = message_types(types)
    error = RecordFluxError()
    # Unknown base type: fail immediately, nothing else can be checked.
    if base_name not in types:
        fail(
            f'undefined base message "{base_name}" in derived message',
            Subsystem.PARSER,
            Severity.ERROR,
            derivation.location,
        )
    # Base type exists but is not a message type: report both the offending
    # derivation and the location of the invalid base type.
    if base_name not in messages:
        error.append(
            f'illegal derivation "{derivation.identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            derivation.location,
        )
        error.append(
            f'invalid base message type "{base_name}"',
            Subsystem.PARSER,
            Severity.INFO,
            types[base_name].location,
        )
        error.propagate()
    base = messages[base_name]
    # Deriving from an already derived message is rejected.
    if isinstance(base, DerivedMessage):
        error.append(
            f'illegal derivation "{derivation.identifier}"',
            Subsystem.PARSER,
            Severity.ERROR,
            derivation.location,
        )
        error.append(f'invalid base message "{base_name}"', Subsystem.PARSER, Severity.INFO, base.location)
        error.propagate()
    return (UnprovenDerivedMessage(
        derivation.identifier, base, location=derivation.location).merged().proven())
def graph(args: argparse.Namespace) -> None:
    """Write graphs for all messages and sessions and a JSON index of their locations."""
    out_dir = args.output_directory
    if not out_dir.is_dir():
        fail(f'directory not found: "{out_dir}"', Subsystem.CLI)

    model, _ = parse(args.files, args.no_verification)
    suffix = f".{args.format}"

    for message in model.messages:
        path = (out_dir / message.identifier.flat).with_suffix(suffix)
        write_graph(create_message_graph(message), path, fmt=args.format)

    for session in model.sessions:
        path = (out_dir / session.identifier.flat).with_suffix(suffix)
        write_graph(create_session_graph(session, args.ignore), path, fmt=args.format)

    # Map source file -> flat identifier -> start/end line and column, for
    # every message or session that carries complete location information.
    locations: Dict[str, Dict[str, Dict[str, Dict[str, int]]]] = {}
    for item in [*model.messages, *model.sessions]:
        if not (
            isinstance(item, (Message, Session))
            and item.location
            and item.location.start
            and item.location.end
        ):
            continue
        locations[str(item.location.source)] = {
            item.identifier.flat: {
                "start": {"line": item.location.start[0], "column": item.location.start[1]},
                "end": {"line": item.location.end[0], "column": item.location.end[1]},
            }
        }

    with open(out_dir / "locations.json", "w", encoding="utf-8") as f:
        json.dump(locations, f)
def validate(args: argparse.Namespace) -> None:
    """Validate message samples against a specification.

    Fails (CLI subsystem) on missing sample directories, an existing output
    file, or an invalid message identifier; validation errors are reported
    via the validator subsystem.
    """
    if args.valid_samples_directory is None and args.invalid_samples_directory is None:
        fail("must provide directory with valid and/or invalid messages", Subsystem.CLI)

    for directory in (args.valid_samples_directory, args.invalid_samples_directory):
        if directory is not None and not directory.is_dir():
            fail(f"{directory} does not exist or is not a directory", Subsystem.CLI)

    if args.output_file is not None and args.output_file.exists():
        fail(f"output file already exists: {args.output_file}", Subsystem.CLI)

    try:
        identifier = ID(args.message_identifier)
    except FatalError as e:
        fail(f"invalid identifier: {e}", Subsystem.CLI)

    try:
        # Constructed inside the try block so that errors raised during
        # validator setup are handled the same way as validation errors.
        validator = Validator(
            [args.specification],
            args.checksum_module,
            args.no_verification,
            args.split_disjunctions,
        )
        validator.validate(
            identifier,
            args.invalid_samples_directory,
            args.valid_samples_directory,
            args.output_file,
            args.abort_on_error,
            args.coverage,
            args.target_coverage,
        )
    except ValidationError as e:
        fail(str(e), Subsystem.VALIDATOR)
    except PyRFLXError as e:
        # Escalate unexpected PyRFLX errors to a fatal error, keeping the cause.
        fatal_error = FatalError()
        fatal_error.extend(e)
        raise fatal_error from e
def generate(args: argparse.Namespace) -> None:
    """Parse the given specification files and write generated units into the directory."""
    # WORKAROUND: Componolit/Workarounds#28
    if args.prefix == " ":
        args.prefix = ""
    if args.prefix and "" in args.prefix.split("."):
        fail(f'invalid prefix: "{args.prefix}"', Subsystem.CLI)

    out_dir = Path(args.directory)
    if not out_dir.is_dir():
        fail(f'directory not found: "{out_dir}"', Subsystem.CLI)

    generator = Generator(
        args.prefix,
        reproducible=os.environ.get("RFLX_REPRODUCIBLE") is not None,
    )
    generator.generate(parse(args.files))
    generator.write_units(out_dir)
    if not args.no_library:
        generator.write_library_files(out_dir)
    if args.prefix == DEFAULT_PREFIX:
        generator.write_top_level_package(out_dir)
def raise_model_error() -> None:
    """Report a fatal model error with a fixed test message and location (test helper)."""
    fail("TEST", Subsystem.MODEL, Severity.ERROR, Location((8, 22)))
def raise_parser_error() -> None:
    """Report a fatal parser error with a fixed test message and location (test helper)."""
    fail("TEST", Subsystem.PARSER, Severity.ERROR, Location((8, 22)))
def create_refinement(refinement: RefinementSpec, types: Mapping[ID, Type]) -> Refinement:
    """Create a refinement from its specification.

    Validates that the refined message (PDU), the refining message (SDU), the
    refined field, and every variable used in the refinement condition exist,
    and reports a duplicate refinement against the already known types.
    """
    messages = message_types(types)

    refinement.pdu = qualified_type_name(refinement.pdu, refinement.package)
    if refinement.pdu not in messages:
        fail(
            f'undefined type "{refinement.pdu}" in refinement',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.location,
        )

    pdu = messages[refinement.pdu]
    error = RecordFluxError()

    # The valid literals do not depend on the condition variable, so build the
    # list once instead of rebuilding it on every loop iteration (previously it
    # was recomputed per variable).
    literals = [
        literal
        for field_type in pdu.types.values()
        if isinstance(field_type, Enumeration)
        for literal in field_type.literals.keys()
    ] + [
        enum_type.package * literal
        for enum_type in types.values()
        if isinstance(enum_type, Enumeration)
        for literal in enum_type.literals.keys()
    ]

    for variable in refinement.condition.variables():
        if (
            Field(str(variable.name)) not in pdu.fields
            and variable.identifier not in literals
        ):
            error.append(
                f'unknown field or literal "{variable.identifier}" in refinement'
                f' condition of "{refinement.pdu}"',
                Subsystem.PARSER,
                Severity.ERROR,
                variable.location,
            )

    if Field(refinement.field) not in pdu.fields:
        error.append(
            f'invalid field "{refinement.field}" in refinement',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.field.location,
        )
    error.propagate()

    refinement.sdu = qualified_type_name(refinement.sdu, refinement.package)
    if refinement.sdu not in messages:
        error.append(
            f'undefined type "{refinement.sdu}" in refinement of "{refinement.pdu}"',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.sdu.location,
        )
        error.propagate()

    sdu = messages[refinement.sdu]

    result = Refinement(
        refinement.package,
        pdu,
        Field(refinement.field),
        sdu,
        refinement.condition,
        refinement.location,
    )
    result.error.extend(error)

    # A structurally equal refinement already exists: report both occurrences.
    if result in types.values():
        result.error.append(
            f'duplicate refinement with "{refinement.sdu}"',
            Subsystem.PARSER,
            Severity.ERROR,
            refinement.location,
        )
        result.error.append(
            "previous occurrence",
            Subsystem.PARSER,
            Severity.INFO,
            types[result.identifier].location,
        )

    return result
def raise_parser_error() -> None:
    """Report a fatal parser error with a fixed test message and no location (test helper)."""
    fail("TEST", Subsystem.PARSER, Severity.ERROR)
def verify_identifier(string: str, location: int, tokens: ParseResults) -> ID:
    """Convert a parsed identifier token into an ID with source location.

    Fails (parser subsystem) if the identifier is a reserved word, since such
    names cannot be used as identifiers.
    """
    # Ada reserved words plus the additionally reserved identifiers
    # "initial" and "final". A set makes the membership test O(1) instead of
    # scanning a 76-element list.
    reserved_words = {
        "abort", "abs", "abstract", "accept", "access", "aliased", "all",
        "and", "array", "at", "begin", "body", "case", "constant", "declare",
        "delay", "delta", "digits", "do", "else", "elsif", "end", "entry",
        "exception", "exit", "for", "function", "generic", "goto", "if", "in",
        "interface", "is", "limited", "loop", "mod", "new", "not", "null",
        "of", "or", "others", "out", "overriding", "package", "pragma",
        "private", "procedure", "protected", "raise", "range", "record",
        "rem", "renames", "requeue", "return", "reverse", "select",
        "separate", "some", "subtype", "synchronized", "tagged", "task",
        "terminate", "then", "type", "until", "use", "when", "while", "with",
        "xor", "initial", "final",
    }
    data = tokens[0].asDict()
    # Use a distinct local name instead of shadowing the "tokens" parameter.
    value = data["value"]
    locn = parser_location(data["locn_start"], data["locn_end"], string)
    if value[0].lower() in reserved_words:
        fail(
            f'reserved word "{value[0]}" used as identifier',
            Subsystem.PARSER,
            Severity.ERROR,
            locn,
        )
    return ID(value[0], locn)