Example #1
def parse_one_thing(src_string):
    """Parse the first form from the string. Return it and the
    remainder of the string."""
    import re
    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    from rply.errors import LexingError
    # ParserState and LexException are provided by the enclosing
    # hy.lex module, where this function is defined.
    tokens = []
    err = None
    for token in lexer.lex(src_string):
        tokens.append(token)
        try:
            # Try to parse the tokens collected so far as exactly one form.
            model, = parser.parse(
                iter(tokens),
                state=ParserState(src_string, filename=None))
        except (LexingError, LexException) as e:
            # Not a complete form yet; remember the error and keep reading.
            err = e
        else:
            # Slice off the text the form consumed: skip end_line - 1 full
            # lines, then end_column characters on the form's last line.
            return model, src_string[re.match(
                r'.+\n' * (model.end_line - 1)
                    + '.' * model.end_column,
                src_string).end():]
    if err:
        raise err
    raise ValueError("No form found")
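A minimal usage sketch, assuming a Hy version from the rply-based lexer era where this helper is importable from hy.lex:

from hy.lex import parse_one_thing

# Parse the first form; the unconsumed tail of the string is returned too.
form, rest = parse_one_thing("(+ 1 2) (print 3)")
print(form)  # the model for (+ 1 2)
print(rest)  # the remainder of the string, e.g. " (print 3)"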
Example #2
def tokenize(source, filename=None):
    """Tokenize a Lisp file or string buffer into internal Hy objects.

    Args:
       source (str): The source to tokenize.
       filename (Optional[str]): The filename corresponding to `source`.

    Returns:
       typing.List[Object]: list of hy object models
    """
    from rply.errors import LexingError

    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    # ParserState and LexException are provided by the enclosing
    # hy.lex module, where this function is defined.

    try:
        return parser.parse(lexer.lex(source), state=ParserState(source, filename))
    except LexingError as e:
        # Convert rply's LexingError into Hy's LexException, clamping the
        # reported position to at least line 1, column 1.
        pos = e.getsourcepos()
        raise LexException(
            "Could not identify the next token.",
            None,
            filename,
            source,
            max(pos.lineno, 1),
            max(pos.colno, 1),
        )
    except LexException as e:
        # LexExceptions raised by the parser are passed through unchanged.
        raise e
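Calling this version directly is straightforward; a short sketch (same assumption about the Hy version):

from hy.lex import tokenize

models = tokenize('(print "hello") 42', filename="example.hy")
# models is a list of Hy model objects, one per top-level form.
for model in models:
    print(type(model).__name__)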
Example #3
def tokenize(buf):
    """
    Tokenize a Lisp file or string buffer into internal Hy objects.
    """
    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    from hy.lex.exceptions import LexException
    from rply.errors import LexingError
    try:
        return parser.parse(lexer.lex(buf))
    except LexingError as e:
        # Convert rply's LexingError into Hy's LexException, keeping the
        # position of the token that could not be recognized.
        pos = e.getsourcepos()
        raise LexException("Could not identify the next token.", pos.lineno,
                           pos.colno)
Example #4
def tokenize(buf):
    """
    Tokenize a Lisp file or string buffer into internal Hy objects.
    """
    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    from hy.lex.exceptions import LexException
    from rply.errors import LexingError
    try:
        return parser.parse(lexer.lex(buf))
    except LexingError as e:
        # Convert rply's LexingError into Hy's LexException, attaching the
        # source buffer so the error can show context.
        pos = e.getsourcepos()
        raise LexException("Could not identify the next token.", pos.lineno,
                           pos.colno, buf)
    except LexException as e:
        # A LexException raised by the parser may lack the source; fill it in.
        if e.source is None:
            e.source = buf
        raise
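A sketch of the failure path this variant guards against. Two assumptions here: that the exception stores its constructor arguments as attributes (as the e.source check above suggests), and that an unterminated string literal is an input the lexer rejects:

from hy.lex import tokenize
from hy.lex.exceptions import LexException

try:
    tokenize('(print "unterminated)')  # the string literal never closes
except LexException as e:
    # The exception records where lexing stopped, and this variant also
    # attaches the source buffer.
    print(e.message, e.lineno, e.colno)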
Example #5
def tokenize(source, filename=None):
    """ Tokenize a Lisp file or string buffer into internal Hy objects.

    Parameters
    ----------
    source: str
        The source to tokenize.
    filename: str, optional
        The filename corresponding to `source`.
    """
    from hy.lex.lexer import lexer
    from hy.lex.parser import parser
    from rply.errors import LexingError
    # ParserState and LexException are provided by the enclosing
    # hy.lex module, where this function is defined.
    try:
        return parser.parse(lexer.lex(source),
                            state=ParserState(source, filename))
    except LexingError as e:
        pos = e.getsourcepos()
        raise LexException("Could not identify the next token.", None,
                           filename, source, max(pos.lineno, 1),
                           max(pos.colno, 1))
    except LexException as e:
        raise e
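The models returned here carry the source positions that parse_one_thing in Example #1 relies on; a quick sketch (attribute names taken from that example):

from hy.lex import tokenize

model = tokenize("(+ 1 2)")[0]
# end_line and end_column locate the end of the form in the source,
# which is exactly what Example #1 uses to split off the remainder.
print(model.end_line, model.end_column)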