from io import StringIO
from tokenize import TokenError, generate_tokens

# Sentinels recording why tokenization stopped early.
_MULTILINE_STRING = object()
_MULTILINE_STRUCTURE = object()

def _line_tokens(line):
    """Helper for has_comment and ends_in_comment_or_string."""
    readline = StringIO(line).readline
    toktypes = set()
    try:
        for t in generate_tokens(readline):
            toktypes.add(t[0])
    except TokenError as e:
        # TokenError is raised in only two cases: an unterminated
        # multi-line string, or an unclosed multi-line structure
        # (open brackets or parentheses).
        if 'multi-line string' in e.args[0]:
            toktypes.add(_MULTILINE_STRING)
        else:
            toktypes.add(_MULTILINE_STRUCTURE)
    return toktypes
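A quick sketch of how _line_tokens behaves, assuming the definitions above are in scope: a complete line reports ordinary token types, while an unterminated triple-quoted string is flagged with the _MULTILINE_STRING sentinel.

import tokenize

# A complete line: COMMENT appears among the returned token types.
print(tokenize.COMMENT in _line_tokens("x = 1  # trailing comment"))  # True

# An unterminated triple-quoted string raises TokenError internally;
# _line_tokens records that with the _MULTILINE_STRING sentinel.
print(_MULTILINE_STRING in _line_tokens('s = """still open'))  # True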
Example #3
from io import StringIO

# tokenize2 is IPython's vendored copy of the stdlib tokenize module;
# generate_tokens and TokenError come from the same place.
from IPython.utils import tokenize2
from IPython.utils.tokenize2 import TokenError, generate_tokens

def has_comment(src):
    """Indicate whether an input line has (i.e. ends in, or is) a comment.

    This uses tokenize, so it can distinguish comments from # inside strings.

    Parameters
    ----------
    src : string
      A single line input string.

    Returns
    -------
    comment : bool
        True if source has a comment.
    """
    readline = StringIO(src).readline
    toktypes = set()
    try:
        for t in generate_tokens(readline):
            toktypes.add(t[0])
    except TokenError:
        # An incomplete line (e.g. an open string) can still be checked:
        # any COMMENT token seen before the error has been recorded.
        pass
    return tokenize2.COMMENT in toktypes
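A quick check, assuming the definition above is in scope: has_comment distinguishes a real comment from a # inside a string literal.

print(has_comment("x = 1  # a real comment"))          # True
print(has_comment("s = 'contains # but no comment'"))  # False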
def reset_tokenizer(self):
    # Variant 1: generate_tokens pulls lines through a bound get_line method.
    self.tokenizer = generate_tokens(self.get_line)

def reset_tokenizer(self):
    # Variant 2: generate_tokens pulls lines from an iterator over the
    # buffered source lines in self.buf.
    it = iter(self.buf)
    self.tokenizer = generate_tokens(it.__next__)
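A minimal sketch of a class hosting the second variant; the class name and buffer contents here are illustrative, not from the original source.

from tokenize import generate_tokens

class LineTokenizer:
    # Hypothetical host class: holds buffered source lines and a
    # restartable tokenizer, as in the reset_tokenizer variants above.
    def __init__(self, lines):
        self.buf = lines  # list of source lines, each ending in '\n'
        self.reset_tokenizer()

    def reset_tokenizer(self):
        it = iter(self.buf)
        self.tokenizer = generate_tokens(it.__next__)

lt = LineTokenizer(["x = 1\n", "y = 2\n"])
for tok in lt.tokenizer:
    print(tok.type, tok.string)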
Example #9
def reset_tokenizer(self):
    # Python 2/3-compatible variant: an iterator's next method is named
    # __next__ on Python 3 and next on Python 2. PY3 is a boolean flag
    # (see the note below).
    it = iter(self.buf)
    nxt = it.__next__ if PY3 else it.next
    self.tokenizer = generate_tokens(nxt)
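The PY3 flag is not defined in the snippet; IPython provides it via a py3compat helper, but it is equivalent to a plain version check:

import sys

# Equivalent of the PY3 flag assumed above.
PY3 = sys.version_info[0] >= 3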