def token_base_name(self, token):
    """
    Return the Name corresponding to a token given in any accepted form.

    :param TokenAction|Enum|Name|str token: Input token. It can be either a
        TokenAction subclass (i.e. a Lexer subclass attribute), an enum value
        from "self.tokens", the token Name or a string (case insensitive
        token name).
    :rtype: Name
    """
    # TokenAction instances carry their own name directly
    if isinstance(token, TokenAction):
        return token.name

    # A Name is accepted as-is, provided it denotes a known token
    if isinstance(token, Name):
        assert token in self.tokens_set
        return token

    # Anything else must be a string: either a token name (case
    # insensitive) or a token literal.
    assert isinstance(
        token, str), ("Bad type for {}, supposed to be str|{}".format(
            token, self.tokens.__name__))

    candidate = Name.from_lower(token.lower())
    if candidate in self.tokens_set:
        return candidate
    if token in self.literals_map:
        return self.literals_map[token].name

    # Unknown string: report it through the project's diagnostic helper
    check_source_language(
        False,
        "{} token literal is not part of the valid tokens for "
        "this grammar".format(token))
def token_base_name(self, token):
    """
    Helper function to get the name of a token.

    :param TokenAction|Enum|Name|str token: Input token. It can be either a
        TokenAction subclass (i.e. a Lexer subclass attribute), an enum value
        from "self.tokens_class", the token Name or a string (case insensitive
        token name).
    :rtype: Name
    :raise ValueError: If ``token`` is a string that matches neither a known
        token name nor a known token literal.
    """
    if isinstance(token, TokenAction):
        # Lexer subclass attributes know their own name
        return token.name
    elif isinstance(token, Name):
        assert token in self.tokens_set
        return token
    else:
        assert isinstance(token, str), (
            "Bad type for {}, supposed to be str|{}".format(
                token, self.tokens_class.__name__
            )
        )
        name = Name.from_lower(token.lower())
        if name in self.tokens_set:
            return name
        elif token in self.literals_map:
            return self.literals_map[token].name
        else:
            # Raise a specific exception type (was a bare Exception) so
            # callers can catch this error without also swallowing
            # unrelated failures. ValueError is still caught by any
            # existing "except Exception" handler, so this is
            # backward-compatible.
            raise ValueError(
                "{} token literal is not part of the valid tokens for "
                "this grammar".format(token)
            )
def adaify_name(context: Context, name: str) -> str:
    """
    Turn a symbol name like a__b into an Ada-like name such as A.B.

    Also strip the $LIB_NAME.Analysis prefix, if present.
    """
    prefix = context.analysis_prefix
    stripped = name[len(prefix):] if name.startswith(prefix) else name

    # Each "__"-separated chunk becomes one Ada-cased dotted component
    components = [
        Name.from_lower(chunk).camel_with_underscores
        for chunk in stripped.split('__')
    ]
    return '.'.join(components)