Example no. 1
def getTokens(command):
    """Return list of token tuples for command."""

    # In case the command is unicode, try encoding it
    if type(command) == unicode:
        try:
            command = command.encode(wx.GetDefaultPyEncoding())
        except UnicodeEncodeError:
            pass  # otherwise leave it alone

    f = cStringIO.StringIO(command)
    # tokens is a list of token tuples, each looking like:
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't use list comprehension:
    #   tokens = [token for token in tokenize.generate_tokens(f.readline)]
    # because of need to append as much as possible before TokenError.
    try:
        ##        This code wasn't backward compatible with Python 2.1.3.
        ##
        ##        for token in tokenize.generate_tokens(f.readline):
        ##            tokens.append(token)

        # This works with Python 2.1.3 (with nested_scopes).
        def eater(*args):
            tokens.append(args)

        tokenize.tokenize_loop(f.readline, eater)
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens
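
This first variant is Python 2 only: unicode, cStringIO, and tokenize.tokenize_loop (an undocumented helper in the Python 2 tokenize module that feeds each token tuple to a callback) were all removed in Python 3, and wx.GetDefaultPyEncoding belongs to classic wxPython. A minimal sketch of the imports the example relies on and a call, assuming Python 2 (the sample fragment is only an illustration, not part of the original):

# Python 2 sketch: imports the example above needs but does not show.
import cStringIO
import tokenize
import wx  # for wx.GetDefaultPyEncoding()

# An unterminated fragment makes tokenize raise TokenError internally,
# but the tokens gathered before the error are still returned.
for ttype, tstring, start, end, line in getTokens('x = (1 +'):
    print ttype, repr(tstring)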
Example no. 2
def getTokens(command):
    """Return list of token tuples for command."""

    # In case the command is unicode, try encoding it
    if type(command) == unicode:
        try:
            command = command.encode(wx.GetDefaultPyEncoding())
        except UnicodeEncodeError:
            pass # otherwise leave it alone
                
    f = cStringIO.StringIO(command)
    # tokens is a list of token tuples, each looking like: 
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't use list comprehension:
    #   tokens = [token for token in tokenize.generate_tokens(f.readline)]
    # because of need to append as much as possible before TokenError.
    try:
##        This code wasn't backward compatible with Python 2.1.3.
##
##        for token in tokenize.generate_tokens(f.readline):
##            tokens.append(token)

        # This works with Python 2.1.3 (with nested_scopes).
        def eater(*args):
            tokens.append(args)
        tokenize.tokenize_loop(f.readline, eater)
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens
Example no. 3
def getTokens(command):
    """Return list of token tuples for command."""

    # In case the command is unicode, try encoding it
    if isinstance(command, string_types):
        try:
            command = command.encode('utf-8')
        except UnicodeEncodeError:
            pass  # otherwise leave it alone

    f = BytesIO(command)
    # tokens is a list of token tuples, each looking like:
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't use list comprehension:
    #   tokens = [token for token in tokenize.generate_tokens(f.readline)]
    # because of need to append as much as possible before TokenError.
    try:
        if not PY3:

            def eater(*args):
                tokens.append(args)

            tokenize.tokenize_loop(f.readline, eater)
        else:
            tokens = list(tokenize.tokenize(f.readline))
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens
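
This variant targets Python 2 and 3 at once; string_types and PY3 presumably come from six, and BytesIO from io. Two quirks are worth noting. On Python 2, calling .encode('utf-8') on a byte string implicitly decodes it as ASCII first, so non-ASCII input can raise UnicodeDecodeError, which the except UnicodeEncodeError clause does not catch. On Python 3, tokens = list(tokenize.tokenize(f.readline)) is built in one shot, so a TokenError from an incomplete fragment discards everything collected so far, despite the comment explaining that incremental appending is the whole point. The assumed imports (not shown in the listing):

# Assumed imports for the six-based variant above.
import tokenize
from io import BytesIO
from six import PY3, string_types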
Example no. 4
def getTokens(command):
    """Return list of token tuples for command."""

    # In case the command is unicode, try encoding it
    if isinstance(command, string_types):
        try:
            command = command.encode('utf-8')
        except UnicodeEncodeError:
            pass # otherwise leave it alone

    f = BytesIO(command)
    # tokens is a list of token tuples, each looking like:
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't use list comprehension:
    #   tokens = [token for token in tokenize.generate_tokens(f.readline)]
    # because of need to append as much as possible before TokenError.
    try:
        if not PY3:
            def eater(*args):
                tokens.append(args)
            tokenize.tokenize_loop(f.readline, eater)
        else:
            for t in tokenize.tokenize(f.readline):
                tokens.append(t)
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens
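
Example no. 4 differs from no. 3 only in the Python 3 branch: appending inside a for loop keeps whatever tokens were produced before tokenize raises TokenError, which matches the stated goal of salvaging as much as possible from an incomplete fragment. A usage sketch under Python 3 (the fragment is an assumption for illustration):

# Python 3 sketch: tokenize.tokenize yields TokenInfo namedtuples,
# starting with an ENCODING token; an unterminated fragment still
# returns the tokens seen before the premature EOF.
for tok in getTokens('x = (1 +'):
    print(tok.type, repr(tok.string))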
Example no. 5
def getTokens(command):
    """Return list of token tuples for command."""
    command = str(command)  # In case the command is unicode, which fails.
    f = cStringIO.StringIO(command)
    # tokens is a list of token tuples, each looking like: 
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't use list comprehension:
    #   tokens = [token for token in tokenize.generate_tokens(f.readline)]
    # because of need to append as much as possible before TokenError.
    try:
##        This code wasn't backward compatible with Python 2.1.3.
##
##        for token in tokenize.generate_tokens(f.readline):
##            tokens.append(token)
        # This works with Python 2.1.3 (with nested_scopes).
##        def eater(*args):
##            tokens.append(args)
##        tokenize.tokenize_loop(f.readline, eater)
        eater = _EaterTokens(tokens)  # oddly, it doesn't work with a lambda (?)
        tokenize.tokenize_loop(f.readline, eater)
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens
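
_EaterTokens is not defined anywhere in this listing; judging from the commented-out eater function it replaced, it is presumably a small callable class that appends each token tuple to the list it is given. A hypothetical reconstruction (the name comes from the example, the body is an assumption):

# Hypothetical sketch of the helper this example relies on.
class _EaterTokens:
    """Callable token eater: collects each token tuple into a shared list."""
    def __init__(self, tokens):
        self.tokens = tokens
    def __call__(self, *args):
        # tokenize_loop invokes this as eater(type, string, start, end, line).
        self.tokens.append(args)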
Example no. 6
def getTokens(command):
    """Return list of token tuples for command."""
    command = str(command)  # In case the command is unicode, which fails.
    f = cStringIO.StringIO(command)
    # tokens is a list of token tuples, each looking like:
    # (type, string, (srow, scol), (erow, ecol), line)
    tokens = []
    # Can't use list comprehension:
    #   tokens = [token for token in tokenize.generate_tokens(f.readline)]
    # because of need to append as much as possible before TokenError.
    try:
        ##        This code wasn't backward compatible with Python 2.1.3.
        ##
        ##        for token in tokenize.generate_tokens(f.readline):
        ##            tokens.append(token)
        # This works with Python 2.1.3 (with nested_scopes).
        ##        def eater(*args):
        ##            tokens.append(args)
        ##        tokenize.tokenize_loop(f.readline, eater)
        eater = _EaterTokens(tokens)  # oddly, it doesn't work with a lambda (?)
        tokenize.tokenize_loop(f.readline, eater)
    except tokenize.TokenError:
        # This is due to a premature EOF, which we expect since we are
        # feeding in fragments of Python code.
        pass
    return tokens