def parse_then(string: str, location: int, tokens: ParseResults) -> Then:
    """Convert a parsed "then" clause into a Then node with source location."""
    inner = tokens[0]
    first_token = inner.pop(0)
    locn = parser_location(first_token, inner[-1], string)
    # A literal "null" target means the link goes to no field.
    target = None if inner[1] == "null" else inner[1]
    # Optional first/length aspects; UNDEFINED when absent.
    aspects = inner[2][0] if inner[2] else None
    first = aspects["first"] if aspects and "first" in aspects else UNDEFINED
    length = aspects["length"] if aspects and "length" in aspects else UNDEFINED
    # Missing condition defaults to an always-true link.
    condition = inner[3][0] if inner[3] else BooleanTrue(location=locn)
    return Then(target, first, length, condition, locn)
def parse_string(self, string: str) -> None:
    """Parse specifications from a string, collecting naming and syntax errors.

    Parsed specifications are prepended to self.__specifications; all
    accumulated errors are raised at the end via error.propagate().
    """
    error = RecordFluxError()
    try:
        for spec in grammar.unit().parseString(string):
            self.__specifications.appendleft(spec)
            check_naming(error, spec.package)
    except (ParseException, ParseFatalException) as e:
        # Translate the pyparsing failure into a located parser error.
        error.append(
            e.msg,
            Subsystem.PARSER,
            Severity.ERROR,
            parser_location(e.loc, e.loc, e.pstr),
        )
    error.propagate()
def __parse(self, specfile: Path, transitions: List[Tuple[ID, ID]] = None) -> None:
    # Parse a specification file and, recursively, every file it includes via
    # its context clause, detecting dependency cycles along the way.
    #
    # NOTE(review): the annotation should be Optional[List[Tuple[ID, ID]]]
    # since the default is None — confirm and fix once typing imports allow.
    #
    # `transitions` records the chain of (package, included item) pairs on the
    # current include path; it is shared (mutated in place) across the
    # recursive calls.
    error = RecordFluxError()
    log.info("Parsing %s", specfile)
    if not transitions:
        transitions = []
    with open(specfile, "r") as filehandle:
        # push_source/pop_source bracket the parse so error locations refer
        # to the correct file; pop happens in `finally` even on parse failure.
        push_source(specfile)
        try:
            for specification in grammar.unit().parseFile(filehandle):
                check_naming(error, specification.package, specfile.name)
                self.__specifications.appendleft(specification)
                for item in specification.context.items:
                    transition = (specification.package.identifier, item)
                    if transition in transitions:
                        # Cycle detected: report the full include chain as
                        # one ERROR followed by INFO entries, and skip the
                        # recursive parse for this item.
                        error.append(
                            f'dependency cycle when including "{transitions[0][1]}"',
                            Subsystem.PARSER,
                            Severity.ERROR,
                            transitions[0][1].location,
                        )
                        error.extend([(
                            f'when including "{i}"',
                            Subsystem.PARSER,
                            Severity.INFO,
                            i.location,
                        ) for _, i in transitions[1:]])
                        continue
                    transitions.append(transition)
                    # Included file is resolved relative to the current file,
                    # lowercased, with the .rflx extension.
                    self.__parse(
                        specfile.parent / f"{str(item).lower()}.rflx", transitions)
        except (ParseException, ParseFatalException) as e:
            error.append(
                e.msg,
                Subsystem.PARSER,
                Severity.ERROR,
                parser_location(e.loc, e.loc, e.pstr, specfile),
            )
        finally:
            pop_source()
    # Raise all collected errors (if any) once the whole subtree is parsed.
    error.propagate()
def numeric_literal() -> Token:
    """Grammar rule for numeric literals (decimal and based), producing Number nodes."""
    # Digit sequence with optional underscore separators; underscores are
    # stripped by the parse action.
    digits = Combine(
        Word(nums) + ZeroOrMore(Optional(Word("_")) + Word(nums)))
    digits.setParseAction(lambda t: t[0].replace("_", ""))

    # Plain decimal literal: (value, base 0).
    plain = Group(digits)
    plain.setParseAction(lambda t: (int(t[0][0]), 0))

    # Based literal, e.g. 16#DEAD_BEEF#: (value in given base, base).
    hex_digit = Word(nums + "ABCDEF")
    based_digits = Combine(hex_digit + ZeroOrMore(Optional("_") + hex_digit))
    based_digits.setParseAction(lambda t: t[0].replace("_", ""))
    based = digits + Literal("#") - based_digits - Literal("#")
    based.setParseAction(lambda t: (int(t[2], int(t[0])), int(t[0])))

    literal = based | plain
    literal.setName("Number")
    # locatedExpr wraps the match with start/end offsets for location tracking.
    return locatedExpr(literal).setParseAction(
        lambda s, l, t: Number(
            t[0][1][0], t[0][1][1], parser_location(t[0][0], t[0][2], s)))
def parse_type(string: str, location: int, tokens: ParseResults) -> Type:
    """Build the AST node for a parsed type declaration, dispatching on its keyword."""
    package = ID("__PACKAGE__")
    locn = parser_location(tokens[0], tokens[-1], string)
    identifier = package * tokens[1]
    keyword = tokens[3]
    if keyword == "mod":
        return ModularInteger(identifier, tokens[4], locn)
    if keyword == "range":
        tokens[6] = tokens[6]["size"]
        return RangeInteger(identifier, tokens[4], tokens[5], tokens[6], locn)
    if keyword == "message":
        return MessageSpec(identifier, tokens[4], locn)
    if keyword == "null message":
        # A null message has no components.
        return MessageSpec(identifier, [], locn)
    if keyword == "(":
        # Enumeration: literals run up to the trailing aspect tokens.
        literals = tokens[4:-3]
        aspects = tokens[-2]
        if "always_valid" not in aspects:
            aspects["always_valid"] = False
        return Enumeration(
            identifier, literals, aspects["size"], aspects["always_valid"], locn)
    if keyword == "new":
        return DerivationSpec(identifier, tokens[4], locn)
    if keyword == "array of":
        element = ReferenceSpec(
            qualified_type_name(tokens[4], package), tokens[4].location)
        return ArraySpec(identifier, element, locn)
    raise ParseFatalException(string, location, "unexpected type")
def parse_refinement(string: str, location: int, tokens: ParseResults) -> RefinementSpec:
    """Build a RefinementSpec; a missing constraint defaults to TRUE."""
    locn = parser_location(tokens[0], tokens[-1], string)
    if "constraint" in tokens:
        condition = tokens[4]
    else:
        condition = TRUE
    return RefinementSpec(tokens[1], tokens[2], tokens[3], condition, locn)
def verify_identifier(string: str, location: int, tokens: ParseResults) -> ID:
    """Reject identifiers that collide with reserved words; return a located ID.

    Raises a parser error via fail() when the (case-insensitive) identifier
    is a reserved word.
    """
    # frozenset: O(1) membership instead of scanning a 70+ element list on
    # every identifier.
    reserved_words = frozenset([
        "abort", "abs", "abstract", "accept", "access", "aliased", "all",
        "and", "array", "at", "begin", "body", "case", "constant", "declare",
        "delay", "delta", "digits", "do", "else", "elsif", "end", "entry",
        "exception", "exit", "for", "function", "generic", "goto", "if", "in",
        "interface", "is", "limited", "loop", "mod", "new", "not", "null",
        "of", "or", "others", "out", "overriding", "package", "pragma",
        "private", "procedure", "protected", "raise", "range", "record",
        "rem", "renames", "requeue", "return", "reverse", "select",
        "separate", "some", "subtype", "synchronized", "tagged", "task",
        "terminate", "then", "type", "until", "use", "when", "while", "with",
        "xor", "initial", "final",
    ])
    data = tokens[0].asDict()
    tokens = data["value"]
    locn = parser_location(data["locn_start"], data["locn_end"], string)
    if tokens[0].lower() in reserved_words:
        fail(
            f'reserved word "{tokens[0]}" used as identifier',
            Subsystem.PARSER,
            Severity.ERROR,
            locn,
        )
    return ID(tokens[0], locn)
def parse_array_aggregate(string: str, location: int, tokens: ParseResults) -> Expr:
    """Build an Aggregate expression from the elements between the delimiters."""
    locn = parser_location(tokens[0], tokens[-1], string)
    elements = tokens[1:-1]
    return Aggregate(*elements, location=locn)