import json
import os
import weakref

# NOTE: PipeTextIO, GrammarASTBuilder, SemanticASTBuilder, TokenSequence,
# ParseTreeBuilder, SemanticTreeBuilder, NextRulesVisitor, Suggestion, and the
# semantic_*/parser_node_* helper functions are project-local names whose
# import lines are not shown in the original snippet; they are assumed to come
# from the surrounding package.

class Antlr4GenericParser:
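    """Generic ANTLR4-based parser front end that drives two helper Java
    processes (pyinterface.ParserIntf for parsing, pyinterface.CompletionSuggestions
    for completions) over text pipes and lifts their JSON output into parse
    and semantic trees."""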
    
    def __init__(self, language, start_rule):
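        """Load the grammar AST and semantic AST for `language`, open two
        persistent Java pipe processes (one for parsing, one for completion
        suggestions), and create a weak-value index of semantic nodes."""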
        self.language = language
        self.start_rule = start_rule
        self.ast = self.loadGrammarAst()
        self.semantic_ast = self.buildSemanticAst()
        self.parser_io = PipeTextIO('java', ['pyinterface.ParserIntf', self.language + '.' + self.language, self.start_rule])
        self.parser_io.connect()
        self.suggestion_io = PipeTextIO('java', ['pyinterface.CompletionSuggestions', self.language, self.start_rule])
        self.suggestion_io.connect()
        self.index = weakref.WeakValueDictionary()

    def loadGrammarAst(self):
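        """Load <language>AST.js from the grammars directory that sits next to
        the pyde package and build one rule context per grammar rule via
        GrammarASTBuilder. Also fills self.state_ast_rules, which
        place_suggestion later indexes by the parser states reported from the
        Java side."""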
        import pyde
        
        with open(os.path.join(pyde.__path__[0], '..', 'grammars', self.language, self.language + 'AST.js')) as ast_file:    
            ast_js = json.load(ast_file)
    
        rules = {}
        self.state_ast_rules = {}  
        for name, rule in ast_js.items():
            b = GrammarASTBuilder(self.state_ast_rules)
            ruleCtx = b.visit(rule)
            rules[name] = ruleCtx
            
        return rules

    def buildSemanticAst(self):
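        """Derive the semantic AST from the grammar AST using SemanticASTBuilder."""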
        b = SemanticASTBuilder(self.ast)
        return b.build()
    
    def parse(self, text, text_range):
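        """Parse text[text_range[0]:text_range[1]] through the Java parser
        pipe, then build the token sequence, the parse tree, and finally the
        semantic tree, which is returned."""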
        active_text = text[text_range[0]: text_range[1]]
        self.dirty = False
        
        ret = self.parser_io.communicate(active_text)
        
        parse_out = json.loads(ret)
        self.tokens = TokenSequence(parse_out['tokens'], text_range[0])
        dict_tree = parse_out['tree']

        common_fields = {'ast': self.ast, 'token': self.tokens}
        parse_builder = ParseTreeBuilder(self.tokens, common_fields)
        self.parse_tree = parse_builder.visit(dict_tree)
        
        common_fields = {'_ast': self.semantic_ast}
        builder = SemanticTreeBuilder(self.semantic_ast, common_fields, self.index)
        self.semantic_tree = builder.visit(self.parse_tree)
        return self.semantic_tree


    def place_suggestion(self, suggestion, node, feature, carret_token):
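        """Map one raw suggestion record returned by the Java side onto
        Suggestion objects anchored in the semantic tree. NoViableAltException
        records are expanded with NextRulesVisitor; for other records the
        semantic tree is climbed upward from the node matching the reported
        state stack, collecting one Suggestion per enclosing feature."""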
        suggestions = []
        
        if suggestion['type'] == 'org.antlr.v4.runtime.NoViableAltException':
            if suggestion['token'] > carret_token.index:
                parent = node
                node = None
            else:
                parent = None
                
            v = NextRulesVisitor(self.ast, self.semantic_ast, node, parent, carret_token.slice.stop)
            v.visit(self.state_ast_rules[suggestion['state_stack'][0]])
            return v.next_rules
        elif node is not None:
            node, feature = semantic_for_state_stack(node, suggestion['state_stack'])
            while node:
                parse_node_child = parser_node_child_by_feature(node._parse_node, feature)
                if parse_node_child:
                    suggestions.append(Suggestion(type=node._type, feature=feature, node=node, parse_node=parse_node_child))
                node, feature = semantic_feature_in_parent(node)
                                
        return suggestions
    
    def completion_suggestions(self, text, text_range):
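        """Compute completion suggestions at the caret position
        (text_range[1]): send the text up to the caret to the Java suggestion
        process, collect Suggestion entries for the semantic contexts
        enclosing the caret token, then merge in the placed suggestions for
        every record the Java side returned. Caret tokens flagged with
        island_grammar_root are skipped."""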
        suggestions = []
        carret_index = text_range[1]
        carret_token = self.tokens.token_at_pos(carret_index)
        if carret_token is None:
            carret_token = self.tokens[0]

        if not hasattr(carret_token, 'island_grammar_root'): 

            active_text = text[text_range[0]:carret_index]
            ret = self.suggestion_io.communicate(active_text)
            suggestions_js = json.loads(ret)
            carret_ctx, feature = semantic_for_rule_node(carret_token)
            node = carret_ctx
            while node:
                if feature:
                    parse_node_child = parser_node_child_by_feature(node._parse_node, feature)
                else:
                    parse_node_child = node._parse_node

                suggestions.append(Suggestion(type=node._type, feature=feature, node=node, parse_node=parse_node_child))
                node, feature = semantic_feature_in_parent(node)

            for s in suggestions_js:
                suggestions.extend(self.place_suggestion(s, carret_ctx, feature, carret_token))
        
        return suggestions
    
    def iter_index_by_type(self, ctx_types):
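        """Yield (key, node) pairs from the weak-value index whose semantic
        node _type is one of ctx_types."""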
        for k, v in self.index.items():
            if v._type in ctx_types:
                yield k, v
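

# A minimal usage sketch (assumptions: a compiled 'vhdl' grammar with start
# rule 'design_file' is reachable on the Java classpath, and `source` is an
# illustrative stand-in, not a validated design file):
if __name__ == '__main__':
    parser = Antlr4GenericParser('vhdl', 'design_file')
    source = "entity e is end entity;"
    # Parse the whole text, then ask for completions at its end.
    tree = parser.parse(source, (0, len(source)))
    for s in parser.completion_suggestions(source, (0, len(source))):
        print(s)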