def p_Number(self, token):
    """Parse a numeric literal token into a ``Number`` node.

    Understands the full literal syntax: an optional leading ``-`` sign,
    an explicit radix written ``base^^digits`` (base 2..36, default 10),
    a ``*^exp`` scale exponent, and a precision/accuracy suffix
    introduced by a backtick.  Emits a syntax error (and raises
    ``InvalidSyntaxError``) for an out-of-range base or a digit that is
    not valid in the chosen base.
    """
    text = token.text

    # Optional leading sign.
    if text.startswith("-"):
        sign, rest = -1, text[1:]
    else:
        sign, rest = 1, text

    # Explicit base: "base^^digits"; without "^^" the base is 10.
    parts = rest.split("^^")
    if len(parts) == 1:
        base, rest = 10, parts[0]
    else:
        assert len(parts) == 2
        base, rest = int(parts[0]), parts[1]
    if not 2 <= base <= 36:
        self.tokeniser.feeder.message("General", "base", base, token.text, 36)
        self.tokeniser.sntx_message(token.pos)
        raise InvalidSyntaxError()

    # Scale exponent: "mantissa*^exp".
    parts = rest.split("*^")
    if len(parts) == 1:
        exp, rest = 0, parts[0]
    else:
        # TODO modify regex and provide error if `exp` is not an int
        exp, rest = int(parts[1]), parts[0]

    # Precision/accuracy suffix after the first backtick; None when absent.
    mantissa, tick, tail = rest.partition("`")
    suffix = tail if tick else None

    # Every digit of the mantissa must be legal in the chosen base.
    for position, ch in enumerate(mantissa.lower()):
        if permitted_digits[ch] >= base:
            self.tokeniser.feeder.message(
                "General", "digit", position + 1, mantissa, base
            )
            self.tokeniser.sntx_message(token.pos)
            raise InvalidSyntaxError()

    node = Number(mantissa, sign=sign, base=base, suffix=suffix, exp=exp)
    self.consume()
    return node
def expect(self, expected_tag):
    """Consume the next token iff its tag is *expected_tag*.

    On a mismatch, report a syntax error at the token's position and
    raise ``InvalidSyntaxError``.
    """
    token = self.next_noend()
    if token.tag != expected_tag:
        self.tokeniser.sntx_message(token.pos)
        raise InvalidSyntaxError()
    self.consume()
def p_Information(self, token):
    """Parse the ``??symbol`` (Information) prefix form.

    Returns an ``Information`` node carrying ``LongForm -> True``; the
    operand must be a bare ``Symbol`` or a syntax error is raised.
    """
    self.consume()
    precedence = prefix_ops["Information"]
    target = self.parse_exp(precedence)
    # Only a plain symbol may follow the operator.
    if target.__class__ is not Symbol:
        raise InvalidSyntaxError()
    long_form = Node("Rule", Symbol("LongForm"), Symbol("True"))
    return Node("Information", target, long_form)
def parse_p(self):
    """Parse a prefix expression.

    Dispatches to a specialised ``p_<Tag>`` handler when one exists,
    otherwise treats the token as a generic prefix operator; anything
    else is a syntax error.
    """
    token = self.next_noend()
    tag = token.tag

    handler = getattr(self, "p_" + tag, None)
    if handler is not None:
        return handler(token)

    if tag in prefix_ops:
        self.consume()
        operand = self.parse_exp(prefix_ops[tag])
        return Node(tag, operand)

    self.tokeniser.sntx_message(token.pos)
    raise InvalidSyntaxError()
def e_MessageName(self, expr1, token, p):
    """Parse ``expr::name`` (MessageName), consuming each ``::name`` part.

    Each part must be a symbol (silently converted to a string) or a
    string literal; any other token is a syntax error.
    """
    parts = [expr1]
    while self.next().tag == "MessageName":
        self.consume()
        token = self.next()
        tag = token.tag
        if tag == "Symbol":
            # silently convert Symbol to String
            self.consume()
            part = String(token.text)
        elif tag == "String":
            part = self.p_String(token)
        else:
            self.tokeniser.sntx_message(token.pos)
            raise InvalidSyntaxError()
        parts.append(part)
    return Node("MessageName", *parts)
def parse_binary(self, expr1, token, p):
    """Parse an infix binary operator application.

    Returns ``None`` when the operator's precedence is below the
    current minimum *p* (so the caller stops).  Raises a syntax error
    when a non-associative operator is chained without parentheses;
    flat operators are flattened into a single node.
    """
    tag = token.tag
    q = binary_ops[tag]
    if q < p:
        return None
    self.consume()

    # Left-associative operators parse their right operand at a
    # strictly higher precedence; right-associative ones reuse q.
    rhs_precedence = q if tag in right_binary_ops else q + 1
    expr2 = self.parse_exp(rhs_precedence)

    # A non-associative operator may not be applied to its own
    # unparenthesised result.
    chained = (
        tag in nonassoc_binary_ops
        and expr1.get_head_name() == tag
        and not expr1.parenthesised
    )
    if chained:
        self.tokeniser.sntx_message(token.pos)
        raise InvalidSyntaxError()

    node = Node(tag, expr1, expr2)
    if tag in flat_binary_ops:
        node.flatten()
    return node
def e_TagSet(self, expr1, token, p):
    """Parse the tagged-assignment forms ``tag /: lhs = rhs``,
    ``tag /: lhs := rhs`` and ``tag /: lhs =.``.

    Returns ``None`` when ``Set`` precedence is below the current
    minimum *p*; otherwise builds a ``TagSet`` / ``TagSetDelayed`` /
    ``TagUnset`` node, raising a syntax error for any other operator
    after the left-hand side.
    """
    q = all_ops["Set"]
    if q < p:
        return None
    self.consume()
    expr2 = self.parse_exp(q + 1)

    # The operator following the lhs decides which Tag* head we build.
    token = self.next_noend()
    head = {
        "Set": "TagSet",
        "SetDelayed": "TagSetDelayed",
        "Unset": "TagUnset",
    }.get(token.tag)
    if head is None:
        self.tokeniser.sntx_message(token.pos)
        raise InvalidSyntaxError()
    self.consume()

    # Unset has no right-hand side.
    if head == "TagUnset":
        return Node(head, expr1, expr2)
    expr3 = self.parse_exp(q + 1)
    return Node(head, expr1, expr2, expr3)