def err(self, exp_token=None, msg=None, debug=False):
    """Raise a parser error located at the current token.

    Raises ParserDebugError when *debug* is true, otherwise ParserError.
    Both receive *msg*, the current token's character info, the expected
    token, and the actual token type.
    """
    exc_class = ParserDebugError if debug else ParserError
    raise exc_class(msg, *self.curr_token.get_char_info(), exp_token,
                    self.curr_token.type)
def parseConstraint(self, raw):
    """Parse a dependency constraint string into a Constraint.

    Accepts forms like ``pkg``, ``pkg (>= 1.2)``, ``pkg [amd64 !i386]``.
    Raises ParserError when *raw* does not match the expected shape.
    """
    constraint = Constraint()
    pattern = re.compile(
        r"(?P<package>[-a-zA-Z0-9+.]+)"
        r"(\s\((?P<operator>\S{1,2})\s(?P<version>\S+?)\))?"
        r"(\s\[(?P<arches>.+)\])?")
    m = pattern.match(raw.strip())
    if m is None or m.group('package') is None:
        raise ParserError("parseConstraint", raw)
    constraint.package = UnversionedBinaryPackage(m.group('package'))
    # Operator and version only appear together, e.g. "(>= 1.0)"
    if m.group('operator') is not None and m.group('version') is not None:
        constraint.operator = m.group('operator')
        constraint.version = self.parseVersionNumber(m.group('version'))
    if m.group('arches') is not None:
        for arch in m.group('arches').split():
            # "!arch" excludes an architecture; bare names restrict to it
            if arch.startswith("!"):
                constraint.exceptin.append(Architecture(arch[1:]))
            else:
                constraint.onlyin.append(Architecture(arch))
    return constraint
def error(self, error_code: ErrorCode, token: Token):
    """Print *error_code* and abort parsing by raising ParserError."""
    print(str(error_code))
    message = f'{str(error_code.value)} -> {str(token)}'
    raise ParserError(error_code=error_code, token=token, message=message)
def parseArea(self, raw):
    """Extract the archive area (main/non-free/contrib) from a pool path.

    Raises ParserError when *raw* does not look like a pool/dists path.
    """
    pattern = re.compile(
        r"^(pool|dists/[a-z]+)/(?P<area>(main|non-free|contrib))/.+?/.+?")
    m = pattern.match(raw)
    if m is None or m.group("area") is None:
        raise ParserError("parseArea", raw)
    return AreaBox.get(m.group("area"))
def node(self, p):
    """Instantiate the lll_ast node class registered under ``p.NAME``.

    Raises ParserError for node types missing from ``lll_ast.NODES``.
    """
    if p.NAME not in lll_ast.NODES:
        raise ParserError(f"Unsupported node type '{p.NAME}'")
    node_class = lll_ast.NODES[p.NAME]
    # NOTE: subclass of `Seq` is temporary until they're removed
    if node_class == lll_ast.Seq or issubclass(node_class, lll_ast.Seq):
        return node_class(list(p.args))
    return node_class(*p.args)
def form_sexp(self):
    """Form an S-expression from lexical tokens.

    Recursively consumes ``self._tokens``; '(' opens a nested SExp,
    otherwise the token type selects a leaf node. Raises ParserError
    on exhausted input or an unrecognized token.
    """
    if not self._tokens:
        raise ParserError("expected an (' but end of string")
    tok = self._tokens.pop(0)
    if tok.value == '(':
        # Nested S-expression: gather children until the matching ')'
        sexp = SExp(tok, self.new_id())
        while self._tokens[0].value != ')':
            sexp.append(self.form_sexp())
        self._tokens.pop(0)  # delete ')'
        return sexp
    if tok.type == 'ID':
        return SSymbol(tok, self.new_id())
    if tok.type == 'INT':
        return SInt(tok, self.new_id())
    if tok.type == 'BOOL':
        return SBool(tok, self.new_id())
    raise ParserError("Unrecognized token '%s' at line %d, col %d"
                      % (tok.raw, tok.lineno, tok.colno))
def parseContributor(self, raw):
    """Parse a ``Name <email>`` string into a Contributor.

    When ``self.opts.role`` is set, delegates to ``guessRole`` instead.
    Raises ParserError when *raw* does not match the expected shape.
    """
    m = re.match(r"(?P<name>.*?)\s*\<(?P<email>\S+)\>", raw)
    if m is None or m.group("name") is None or m.group("email") is None:
        raise ParserError("parseContributor", raw)
    name = m.group("name")
    email = m.group("email")
    if self.opts.role:
        return guessRole(name, email)
    # The name part may match the empty string; pass None in that case
    return Contributor(name if name else None, email)
def error(self, p):
    """Recover from a syntax error by skipping to the matching ')'.

    Tracks paren depth while consuming tokens; raises ParserError with a
    source excerpt either when input runs out mid-expression or once the
    unbalanced region has been located.
    """
    starting_index = p.index
    depth = 1
    # Seek our way out of the broken expression by balancing parens
    while depth > 0:
        try:
            tok = next(self.tokens)
        except StopIteration as e:
            raise ParserError(f"Syntax error @ {starting_index}:\n " +
                              self._source[starting_index:] +
                              "\n---^") from e
        if tok.type == "LPAREN":
            depth += 1
        elif tok.type == "RPAREN":
            depth -= 1
    ending_index = tok.index
    raise ParserError(
        f"Syntax error @ {starting_index}:{ending_index}:\n " +
        self._source[starting_index:ending_index] + "\n---^")
    # NOTE(review): unreachable after the unconditional raise above;
    # preserved as in the original — confirm whether it was intended
    # to run before raising.
    self.restart()
def parseSource(self, raw, binary):
    """Resolve the source package for *binary* from raw's 'Source' field.

    Falls back to the binary's own name/version when no Source field is
    present or the field carries no explicit version. Raises ParserError
    on a malformed Source field.
    """
    if "Source" not in raw:
        # No explicit Source field: source package mirrors the binary
        return SourcePackage(binary.package, binary.version)
    m = re.match(r"(?P<srcname>[-a-zA-Z0-9+.]+)(\s*\((?P<version>\S+?)\))?",
                 raw['Source'])
    if m is None:
        raise ParserError("parseSource", raw['Source'])
    if m.group("version") is not None:
        return SourcePackage(m.group("srcname"),
                             self.parseVersionNumber(m.group("version")))
    return SourcePackage(m.group("srcname"), binary.version)
def parseTags(self, raw):
    """Parse a debtags string like ``facet::tag, facet::{t1,t2}``.

    Returns a list of Tag objects; raises ParserError on any entry that
    does not match the facet::tag grammar.
    """
    pattern = re.compile(
        r"(?P<facet>[a-zA-Z0-9-]+)::(\{(?P<tags>\S+)\}|(?P<tag>[a-zA-Z0-9-:]+))")
    tags = []
    # FIXME: naive split on ", " — brittle against other separators
    for rtag in raw.split(", "):
        m = pattern.match(rtag)
        if m is None or m.group('facet') is None:
            raise ParserError("parseTags", raw)
        facet = m.group('facet')
        if m.group('tag') is not None:
            tags.append(Tag(facet, m.group('tag')))
        elif m.group('tags') is not None:
            # Brace form carries several comma-separated tags per facet
            tags.extend(Tag(facet, t) for t in m.group('tags').split(","))
    return tags
def parseVersionNumber(self, raw):
    """Parse *raw* into a VersionNumber.

    Raises ParserError when VersionNumber rejects the (stripped) input.
    """
    try:
        return VersionNumber(raw.strip())
    except ValueError as exc:
        # Chain the original ValueError so the root cause survives
        # in the traceback (PEP 3134).
        raise ParserError("parseVersionNumber", raw) from exc
def error(self, error_code=None, token=None):
    """Raise a ParserError describing *error_code* at *token*.

    Fix: the original dereferenced ``error_code.value`` unconditionally,
    so calling with the default ``error_code=None`` raised AttributeError
    instead of the intended ParserError. Guard the dereference; behavior
    with a real error_code is unchanged.
    """
    detail = error_code.value if error_code is not None else None
    raise ParserError(error_code=error_code, token=token,
                      message=f"{detail} -> {token}")
def p_error(self, p):
    """PLY error hook: abort parsing with a ParserError for token *p*.

    Fix: removed leftover debug output (``print(dir(p))`` and the raw
    lexpos/lineno/type/value dump) that polluted stdout on every syntax
    error; the raised message already carries the token type.
    """
    raise ParserError("Syntax error in input data: %s" % p.type)