Example #1
import io
import tokenize

def get_quote_type(code: str):
    from flynt.lexer.PyToken import PyToken

    # The first token yielded is the ENCODING marker; skip it and wrap
    # the next (string) token to read its quote type.
    g = tokenize.tokenize(io.BytesIO(code.encode("utf-8")).readline)
    next(g)
    token = PyToken(next(g))

    return token.get_quote_type()
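
A minimal usage sketch (hypothetical driver code, assuming flynt is importable); each call prints flynt's internal quote-type constant for the leading literal:

# Hypothetical driver: every snippet's first real token is a STRING,
# so get_quote_type reports which quoting style it uses.
for snippet in ('"text"', "'text'", '"""text"""'):
    print(snippet, "->", get_quote_type(snippet))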
Example #2
    def empty_append(self, t: PyToken):
        # Only a plain (non-raw) string literal may start a chunk;
        # any other first token completes the chunk immediately.
        if not (t.is_string() and not t.is_raw_string()):
            self.complete = True

        self.tokens.append(t)
Example #3
    def percent_append(self, t: PyToken):
        # TODO: handle all cases?
        if not self[0].is_string():
            self.complete = True
            return

        if len(self) == 2:
            # Third token: "literal % operand" may already parse whole.
            self.tokens.append(t)
            if self.is_parseable:
                self.successful = True
            else:
                self.percent_ongoing = True

        else:
            if self.percent_ongoing:
                self.tokens.append(t)
                if t.is_string() and '{' not in str(self):
                    self.string_in_string = True
                if self.is_parseable:
                    self.percent_ongoing = False
                    self.successful = True
            elif t.is_expr_continuation_op():
                # The %-operand continues past this token.
                self.tokens.append(t)
                self.percent_ongoing = True
            else:
                # The token does not belong to this chunk: close it and
                # tell the caller to feed the token to the next chunk.
                self.complete = True
                self.successful = self.is_parseable
                return REUSE
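
As a point of reference, a stdlib-only sketch of the token stream this state machine walks for a simple %-expression (the literal is illustrative):

import io
import tokenize

# After the ENCODING marker, '"%d items" % n' arrives as STRING,
# OP '%', NAME 'n', then NEWLINE and ENDMARKER bookkeeping tokens.
for tok in tokenize.tokenize(io.BytesIO(b'"%d items" % n\n').readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))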
Example #4
    def second_append(self, t: PyToken):
        if t.is_string():
            # Adjacent string literals are implicitly concatenated:
            # fold this one into the first token.
            self.tokens[0].tokval += t.tokval
            self.tokens[0].end = t.end
        elif t.is_percent_op():
            self.tokens.append(t)
            self.is_percent_chunk = True
        elif t.is_dot_op():
            self.tokens.append(t)
            self.is_call_chunk = True
        else:
            self.tokens.append(t)
            self.complete = True
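
The first branch mirrors Python's implicit concatenation of adjacent literals; a stdlib-only check shows they really arrive as two separate STRING tokens:

import io
import tokenize

src = b'"hello " "world"\n'
strings = [tok for tok in tokenize.tokenize(io.BytesIO(src).readline)
           if tok.type == tokenize.STRING]
print(len(strings))  # 2; second_append merges them into tokens[0]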
Example #5
import io
import tokenize
import traceback
from typing import Generator

def get_chunks(code) -> Generator[Chunk, None, None]:
    # Chunk, PyToken (flynt.lexer.PyToken) and state are flynt internals
    # and must already be in scope.
    g = tokenize.tokenize(io.BytesIO(code.encode("utf-8")).readline)
    chunk = Chunk()

    try:
        for item in g:
            t = PyToken(item)
            reuse = chunk.append(t)

            if chunk.complete:
                yield chunk
                chunk = Chunk()
                # A token that closed the previous chunk may also start
                # the next one; feed it in again.
                if reuse:
                    reuse = chunk.append(t)
                    # assert not reuse
                    if chunk.complete:
                        yield chunk
                        chunk = Chunk()

        yield chunk
    except tokenize.TokenError as e:
        # Malformed source: report the failure only in verbose mode.
        if state.verbose:
            traceback.print_exc()
            print(e)
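
A hypothetical walk-through, assuming the Chunk attributes used above (__str__ and successful) behave as the snippets suggest:

# Each chunk reports whether the lexer judged it convertible
# (attribute names taken from the snippets above).
source = 'greeting = "hello %s" % name\n'
for chunk in get_chunks(source):
    print(repr(str(chunk)), chunk.successful)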
Example #6
    def call_append(self, t: PyToken):
        # A literal among the call tokens means a string inside a
        # string; flag it rather than rejecting the chunk.
        if t.is_string():
            self.string_in_string = True

        self.tokens.append(t)
        # A .format() chunk needs at least string, dot, name and the
        # call tokens before it can parse as a whole expression.
        if len(self) > 3 and self.is_parseable:
            self.complete = True
            self.successful = True
Example #7
    def call_append(self, t: PyToken):
        # No string-in-string support here: a nested literal aborts
        # the chunk instead of flagging it.
        if t.is_string():
            self.complete = True
            self.successful = False
            return

        self.tokens.append(t)
        if len(self) > 3 and self.is_parseable:
            self.complete = True
            self.successful = True
Example #8
    def call_append(self, t: PyToken):
        if t.is_string():
            self.string_in_string = True

        # The attribute after the dot must be 'format'; any other
        # method call is not a conversion candidate.
        if len(self) == 2 and t.tokval != 'format':
            self.complete = True
            self.successful = False
            return

        self.tokens.append(t)
        if len(self) > 3 and self.is_parseable:
            self.complete = True
            self.successful = True
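
For reference, the tokens behind a .format() call that the len(self) == 2 guard inspects (stdlib-only sketch):

import io
import tokenize

# After ENCODING: STRING '"{}"', OP '.', NAME 'format', OP '(',
# NAME 'x', OP ')'; only the 'format' attribute passes the guard.
for tok in tokenize.tokenize(io.BytesIO(b'"{}".format(x)\n').readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))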
Example #9
import io
import tokenize
from typing import Generator

def get_chunks(code) -> Generator[Chunk, None, None]:
    # Same chunker as above; Chunk and PyToken are flynt internals.
    g = tokenize.tokenize(io.BytesIO(code.encode("utf-8")).readline)
    chunk = Chunk()

    for item in g:
        t = PyToken(item)
        reuse = chunk.append(t)

        if chunk.complete:
            yield chunk
            chunk = Chunk()
            # The closing token may start the next chunk.
            if reuse:
                reuse = chunk.append(t)
                # assert not reuse
                if chunk.complete:
                    yield chunk
                    chunk = Chunk()

    yield chunk
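
This is the same chunker as Example #5 minus the tokenize.TokenError guard, so a tokenization error in malformed source propagates to the caller.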
Example #10
    def empty_append(self, t: PyToken):
        # Anything but a plain (non-raw) string literal completes the
        # chunk immediately.
        if not t.is_string() or t.is_raw_string():
            self.complete = True

        self.tokens.append(t)
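
Functionally identical to the guard in Example #2; the condition here is just its De Morgan rewrite.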