Example #1
def get_stack_at_position(grammar, code_lines, module, pos):
    """
    Returns the possible node names (e.g. import_from, xor_test or yield_stmt).
    """
    class EndMarkerReached(Exception):
        pass

    def tokenize_without_endmarker(code):
        tokens = tokenize.source_tokens(code, use_exact_op_types=True)
        for token_ in tokens:
            if token_.string == safeword:
                raise EndMarkerReached()
            elif token_.type == token.DEDENT:
                # Ignore DEDENTs. Error statements should not contain them;
                # if they show up anyway, an indentation was opened and we
                # still see the DEDENTs before the endmarker.
                pass
            else:
                yield token_

    code = _get_code_for_stack(code_lines, module, pos)
    # We use a word to tell Jedi when we have reached the start of the
    # completion.
    safeword = 'XXX_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'
    # Remove as many indents from **all** code lines as possible.
    code = dedent(code + safeword)

    p = parser.Parser(grammar, code, start_parsing=False)
    try:
        p.parse(tokenizer=tokenize_without_endmarker(code))
    except EndMarkerReached:
        return Stack(p.stack)
    raise SystemError("This really shouldn't happen. There's a bug in Jedi.")
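The load-bearing trick in Example #1 is the sentinel ("safeword"): the token generator raises an exception the moment it tokenizes the sentinel word, aborting the parse mid-stream and leaving the parser stack frozen at the completion position. Below is a minimal self-contained sketch of the same idea using the standard-library tokenize module instead of jedi's internal tokenizer; SAFEWORD, tokens_until_safeword and collect_tokens are illustrative names, not jedi API.

import io
import tokenize

SAFEWORD = 'XXX_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'

class EndMarkerReached(Exception):
    pass

def tokens_until_safeword(code):
    # Stream stdlib tokens, bailing out as soon as the sentinel appears.
    readline = io.StringIO(code).readline
    for tok in tokenize.generate_tokens(readline):
        if tok.string == SAFEWORD:
            raise EndMarkerReached()
        yield tok

def collect_tokens(code):
    # Stand-in for the parser: accumulate tokens instead of a parser stack.
    seen = []
    try:
        for tok in tokens_until_safeword(code + SAFEWORD):
            seen.append(tok)
    except EndMarkerReached:
        return seen
    raise SystemError("unreachable: the sentinel is always appended")

print([t.string for t in collect_tokens('if x:\n    y.')])
# -> ['if', 'x', ':', '\n', '    ', 'y', '.']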
Example #2
    def test_end_pos_one_line(self):
        parsed = parser.Parser(parser.load_grammar(), u('''
def testit():
    a = "huhu"
'''))
        tok = parsed.module.subscopes[0].statements[0].children[2]
        assert tok.end_pos == (3, 14)
Example #3
    def test_end_pos_one_line(self):
        parsed = parser.Parser('''
def testit():
    a = "huhu"
''')
        tok = parsed.top_module.subscopes[0].statements[0].token_list[2]
        self.assertEqual(tok.end_pos, (3, 14))
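Examples #2 and #3 assert the same thing against two generations of jedi's parser API (children on the newer tree vs. token_list on the older one): end_pos is a (row, column) tuple with 1-based rows and 0-based, exclusive columns. The convention matches the standard tokenizer, which can be used to double-check the expected (3, 14) — a stdlib sketch, not jedi code:

import io
import tokenize

code = '\ndef testit():\n    a = "huhu"\n'
for tok in tokenize.generate_tokens(io.StringIO(code).readline):
    if tok.type == tokenize.STRING:
        # The literal "huhu" ends on line 3, column 14.
        assert tok.end == (3, 14)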
Example #4
    def test_end_pos_multi_line(self):
        parsed = parser.Parser(parser.load_grammar(), u('''
def testit():
    a = """huhu
asdfasdf""" + "h"
'''))
        tok = parsed.module.subscopes[0].statements[0].children[2].children[0]
        assert tok.end_pos == (4, 11)
Example #5
    def test_end_pos_multi_line(self):
        parsed = parser.Parser('''
def testit():
    a = """huhu
asdfasdf""" + "h"
''')
        tok = parsed.top_module.subscopes[0].statements[0].token_list[2]
        self.assertEqual(tok.end_pos, (4, 11))
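Examples #4 and #5 are the multi-line counterparts: for a triple-quoted string the end position is reported on the physical line where the literal closes. The same numbers fall out of the standard tokenizer (again a stdlib sketch, not jedi code):

import io
import tokenize

code = '\ndef testit():\n    a = """huhu\nasdfasdf""" + "h"\n'
strings = [t for t in tokenize.generate_tokens(io.StringIO(code).readline)
           if t.type == tokenize.STRING]
# The triple-quoted literal spans two physical lines and closes on
# line 4, column 11; the following "h" ends on the same line.
assert strings[0].end == (4, 11)
assert strings[1].end == (4, 17)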
Example #6
def get_stack_at_position(grammar, code_lines, module, pos):
    """
    Returns the possible node names (e.g. import_from, xor_test or yield_stmt).
    """
    user_stmt = module.get_statement_for_position(pos)

    if user_stmt is not None and user_stmt.type in ('indent', 'dedent'):
        code = u('')
    else:
        if user_stmt is None:
            user_stmt = module.get_leaf_for_position(pos,
                                                     include_prefixes=True)
        if pos <= user_stmt.start_pos:
            try:
                leaf = user_stmt.get_previous_leaf()
            except IndexError:
                pass
            else:
                user_stmt = module.get_statement_for_position(leaf.start_pos)

        if user_stmt.type == 'error_leaf' or user_stmt.type == 'string':
            # Error leafs cannot be parsed, and completion inside strings
            # is impossible.
            raise OnErrorLeaf(user_stmt)

        code = _get_code(code_lines, user_stmt.start_pos, pos)
        if code == ';':
            # ; cannot be parsed.
            code = u('')

        # Remove whitespace at the end. Necessary, because the tokenizer
        # would otherwise produce an error token (there's no newline at the
        # end in our case). Stripping it doesn't change which tokens are
        # valid at that position.
        code = code.strip('\t ')

    class EndMarkerReached(Exception):
        pass

    def tokenize_without_endmarker(code):
        tokens = tokenize.source_tokens(code, use_exact_op_types=True)
        for token_ in tokens:
            if token_[0] == token.ENDMARKER:
                raise EndMarkerReached()
            elif token_[0] == token.DEDENT:
                # Ignore DEDENTs. Error statements should not contain them;
                # if they show up anyway, an indentation was opened and we
                # still see the DEDENTs before the endmarker.
                pass
            else:
                yield token_

    p = parser.Parser(grammar, code, start_parsing=False)
    try:
        p.parse(tokenizer=tokenize_without_endmarker(code))
    except EndMarkerReached:
        return Stack(p.stack)
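Unlike the safeword variant in Example #1, this older version stops on the ENDMARKER token, so no fallback raise is needed after the try block: the tokenizer always emits ENDMARKER last, EndMarkerReached is therefore always raised, and the function cannot fall through. The stdlib tokenizer gives the same guarantee, as the short check below illustrates. (The position-lookup half of this function also survives in parso, jedi's parser spun out as a standalone library, as get_leaf_for_position on the parsed module.)

import io
import tokenize

# ENDMARKER is always the final token of a complete token stream.
toks = list(tokenize.generate_tokens(io.StringIO('x = 1\n').readline))
assert toks[-1].type == tokenize.ENDMARKER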