Example #1
0
File: tokenize.py  Project: xxh/xonsh
# True when the interpreter supports the walrus operator (PEP 572, Python 3.8+).
# PYTHON_VERSION_INFO is presumably a (major, minor, micro) tuple defined in a
# sibling module — confirm; with a micro component, `> (3, 8)` also matches
# 3.8.x releases such as (3, 8, 0).
HAS_WALRUS = PYTHON_VERSION_INFO > (3, 8)
if HAS_WALRUS:
    # COLONEQUAL is the token id for ":=" and only exists in `token` from 3.8 on.
    from token import COLONEQUAL

# Lazily-compiled regex matching a PEP 263 coding cookie in a source line,
# e.g. "# -*- coding: utf-8 -*-"; group 1 captures the encoding name.
# NOTE(review): LazyObject appears to defer the re.compile until first use and
# rebind the given name in globals() — confirm against its definition.
cookie_re = LazyObject(
    lambda: re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)", re.ASCII),
    globals(),
    "cookie_re",
)
# Lazily-compiled bytes regex matching a blank line: optional whitespace
# followed by a comment, a bare newline, or end of input.
blank_re = LazyObject(lambda: re.compile(br"^[ \t\f]*(?:[#\r\n]|$)", re.ASCII),
                      globals(), "blank_re")

#
# token modifications
#
# Take a private copy so additions made in this module don't mutate the
# stdlib `token.tok_name` table.
tok_name = tok_name.copy()
# Public API: everything the stdlib `token` module exports, plus the tokenize
# helpers and the extra token names (SEARCHPATH, ATDOLLAR, ...) that are
# presumably defined later in this module — confirm they exist at import time.
__all__ = token.__all__ + [
    "COMMENT",
    "tokenize",
    "detect_encoding",
    "NL",
    "untokenize",
    "ENCODING",
    "TokenInfo",
    "TokenError",
    "SEARCHPATH",
    "ATDOLLAR",
    "ATEQUAL",
    "DOLLARNAME",
    "IOREDIRECT",
]
Example #2
0
from parser import expr
from symbol import sym_name
from token import tok_name
from py2texdata import texfun,texmod,texsym


# Combined id -> name table: stdlib token names merged with the parse-tree
# symbol names (symbol entries win on any overlapping id).
d = {**tok_name, **sym_name}

# Mapping from token / keyword names to their LaTeX spellings.  Keys are the
# names found in the merged tok_name/sym_name table above ('LPAR', 'LESS', ...)
# or literal keyword spellings ('in', 'is').
texop = {
    'LPAR': r'\left(',
    'RPAR': r'\right)',
    'PLUS': r'+',
    'MINUS': r'-',
    'GREATER': r'>',
    # BUG FIX: the stdlib token names for "<" and "<=" are LESS and LESSEQUAL
    # (see the `token` module); the previous keys SMALLER/SMALLEREQUAL do not
    # exist in tok_name and therefore could never match.
    'LESS': r'<',
    'TILDE': r'\~',  # NOTE(review): math-mode tilde is usually \sim — confirm intent
    'GREATEREQUAL': r'\ge',
    'LESSEQUAL': r'\le',
    'EQEQUAL': r'=',
    'NOTEQUAL': r'\ne',
    'in': r'\in',
    'is': r'\equiv',
    'COMMA': r',',
    }

class tex:
  def __init__(self,e):
    self.out=[]
    try:
      self.t=expr(e).tolist()[1]