Code example #1
def tokenize(source, filename=None):
    """Tokenize a Lisp file or string buffer into internal Hy objects.

    Args:
       source (str): The source to tokenize.
       filename (Optional[str]): The filename corresponding to `source`.

    Returns:
       typing.List[Object]: A list of Hy model objects.
    """
    from rply.errors import LexingError

    from hy.lex.lexer import lexer
    from hy.lex.parser import parser

    # LexException and ParserState are defined at module level in
    # hy/lex/__init__.py and are not repeated in this excerpt.
    try:
        return parser.parse(lexer.lex(source), state=ParserState(source, filename))
    except LexingError as e:
        pos = e.getsourcepos()
        raise LexException(
            "Could not identify the next token.",
            None,
            filename,
            source,
            max(pos.lineno, 1),
            max(pos.colno, 1),
        )
    except LexException as e:
        raise e
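
For context, here is a minimal usage sketch of the function above. It assumes a Hy version in which tokenize and LexException are importable from hy.lex, as in this excerpt; the input expression and the printed output are illustrative only.

# Hedged usage sketch: assumes `tokenize` and `LexException` can be
# imported from hy.lex, matching the excerpt above.
from hy.lex import LexException, tokenize

try:
    # Parse a small Hy expression into a list of Hy model objects.
    models = tokenize("(print 1 2)", filename="<demo>")
    for model in models:
        print(type(model).__name__, model)
except LexException as e:
    # The exception carries the filename, source, line, and column
    # that tokenize passes when it converts a LexingError.
    print("lexing failed:", e)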
Code example #2
File: __init__.py  Project: zenhack/hy
def tokenize(buf):
    """
    Tokenize a Lisp file or string buffer into internal Hy objects.
    """
    try:
        return parser.parse(lexer.lex(buf))
    except LexingError as e:
        pos = e.getsourcepos()
        raise LexException("Could not identify the next token.", pos.lineno,
                           pos.colno)
Code example #3
def tokenize(buf):
    """
    Tokenize a Lisp file or string buffer into internal Hy objects.
    """
    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    from rply.errors import LexingError
    # LexException comes from the module-level imports of
    # hy/lex/__init__.py and is not shown in this excerpt.
    try:
        return parser.parse(lexer.lex(buf))
    except LexingError as e:
        pos = e.getsourcepos()
        raise LexException("Could not identify the next token.", pos.lineno,
                           pos.colno, buf)
    except LexException as e:
        if e.source is None:
            e.source = buf
        raise
Code example #4
File: __init__.py  Project: zequequiel/hy
def tokenize(source, filename=None):
    """ Tokenize a Lisp file or string buffer into internal Hy objects.

    Parameters
    ----------
    source: str
        The source to tokenize.
    filename: str, optional
        The filename corresponding to `source`.
    """
    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    from rply.errors import LexingError
    try:
        return parser.parse(lexer.lex(source),
                            state=ParserState(source, filename))
    except LexingError as e:
        pos = e.getsourcepos()
        raise LexException("Could not identify the next token.", None,
                           filename, source, max(pos.lineno, 1),
                           max(pos.colno, 1))
    except LexException as e:
        raise e
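
Finally, a hedged sketch of the error path, under the same import assumptions as above: an unclosed form is expected to surface as the LexException (or a subclass such as PrematureEndOfInput, depending on the Hy version) raised in the handlers shown in these examples.

# Hedged error-path sketch: an unclosed form should raise LexException
# (or a subclass, depending on the Hy version).
from hy.lex import LexException, tokenize

try:
    tokenize("(print 1", filename="demo.hy")
except LexException as e:
    print("lex error:", e)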