Example #1
0
def parse():

    data = bufferToString(buffer)
    print("DATA: ", data)
    #print("THIS IS WHAT IS BEING VALIDATED: " + anArg)
    #data = anArg
    
    lexer = python_lexer.PythonLexer()
    lexer.input(data)
    #for token in lexer:
    #    print(token.value)
    lexer = python_lexer.PythonLexer()
    res = python_parser.parse_data(data,lexer)
    
    tree, delParts = utils.traverse_ast_test(res)  
        
    try:
        ast_tree = utils.parse_with_ast(res)
        #print(utils.astNode_to_tree(ast_tree))
    except Exception as error:
        print("Error in the part that should be valid: ", error)
        ast_tree = None  # fall back so the return below does not raise NameError
    
    delLines = []
    for delPart in delParts:
        # Collect every line number covered by this deleted part (inclusive range)
        for i in range(delPart[0], delPart[1] + 1):
            delLines.append(i)
    
    delLines += res.emptyLinesNums
    # Deduplicate and sort the collected line numbers
    delLines = sorted(set(delLines))
    return (ast_tree, delLines)
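A minimal, self-contained sketch of the deleted-line bookkeeping above, with hypothetical values standing in for what utils.traverse_ast_test(res) and res.emptyLinesNums would provide:

del_parts = [(3, 5), (9, 9)]     # hypothetical inclusive (start, end) ranges of removed parts
empty_line_nums = [1, 4]         # hypothetical res.emptyLinesNums

del_lines = []
for start, end in del_parts:
    del_lines.extend(range(start, end + 1))

del_lines += empty_line_nums
del_lines = sorted(set(del_lines))   # same deduplicate-and-sort step as in parse()
print(del_lines)                     # [1, 3, 4, 5, 9]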
Example #2
0
def parseAndValidate(connection):
    try:
        arrayOfProblems = []
        # Decode the data
        #data = anArg.decode("utf-8")
        data = bufferToString(buffer)
        print("DATA: ", data)
        #print("THIS IS WHAT IS BEING VALIDATED: " + anArg)
        #data = anArg
    
        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        #for token in lexer:
        #    print(token.value)
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data,lexer)
    
        tree, delParts = utils.traverse_ast_test(res)  
        
        try:
            ast_tree = utils.parse_with_ast(res)
            #print(utils.astNode_to_tree(ast_tree))
        except Exception as error:
            print("Error in the part that should be valid: ", error)
    
        parser = Parser()
        module_scope = parser.eval_code(ast_tree)
        delLines = []
        for delPart in delParts:
            # Collect every line number covered by this deleted part (inclusive range)
            for i in range(delPart[0], delPart[1] + 1):
                delLines.append(i)
        
        #processing syntax problems
        for line in delLines:
            p = []
            p.append(line)
            p.append('invalid syntax')
            arrayOfProblems.append(p)
            
        delLines += res.emptyLinesNums
        # Deduplicate and sort the collected line numbers
        delLines = sorted(set(delLines))
        print("DEL LINES:", delLines)
        
        
        problem_symbols = set(parser.problems)
        
        listOfLines = []
        for problem in problem_symbols:   
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(problem.node.lineno, delLines)
                problem.node.processed = 1
            print('P: ', problem, 'at line: ', problem.node.lineno)
            if not (problem.node.lineno in listOfLines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                arrayOfProblems.append(b)
                listOfLines.append(problem.node.lineno)
        
        pickleProblems = pickleListOfProblems(arrayOfProblems)
        pickleProblemsStr = pickle.dumps(pickleProblems, 2)
        connection.sendall(pickleProblemsStr)
        return (ast_tree, delLines)
    except Exception:
        traceback.print_exc()
        #sock.close()
        connection.sendall(bytes("endOfValidation", "utf-8"))
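The problem list is pickled with protocol 2 and pushed through the socket. A rough round-trip sketch, assuming pickleListOfProblems() produces a plain picklable list of [line, message] pairs (its real return type is not shown here):

import pickle

problems = [[3, 'invalid syntax'], [7, "Undefined variable 'foo'"]]   # hypothetical payload

payload = pickle.dumps(problems, 2)        # what parseAndValidate() sends via connection.sendall()
received = pickle.loads(payload)           # what the client side would do after reading the socket

for line, message in received:
    print("line", line, "->", message)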
Example #3
0
def get_auto_completion(host, client_address, buffer_used, variable_line,
                        line_number):
    '''
    This method is called when the user presses the '.' symbol.

    We want to find all possible attributes for the given symbol in the given buffer at the given line.

    Call eval_in_root and pass nodeAst, which means evaluation will stop once the given node is reached.
    '''
    try:
        #Get leading spaces so the indent matches current level
        variable_line = variable_line.replace('\t', '    ')
        leading_spaces = len(variable_line) - len(variable_line.lstrip())

        # Get only relevant part of line
        line_modified = utils.getObjectStringFromLine(variable_line)

        # Replace given line with string which will be resolved - much easier for our solution
        resolving_name = 'target_for_completion'
        buffer = buffer_used
        buffer[line_number - 1] = variable_line[
            0:leading_spaces] + resolving_name + ' = ' + line_modified
        buffer = buffer[0:line_number]

        # Parse modified buffer and eval
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)

        tree, del_parts = utils.traverse_ast_test(res)
        ast_tree = utils.parse_with_ast(res)
        #        print("TREE:"+str(utils.astNode_to_tree(ast_tree)))

        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0], delPart[1] + 1):
                del_lines.append(i)

        del_lines += res.emptyLinesNums
        del_lines = sorted(set(del_lines))

        current_line_number = utils.getCurrentLineNum(line_number, del_lines)
        parser = FinalParser(1)
        parser.eval_in_root(ast_tree)

        #        print(str(parser.scopes[0]))

        # Remove inf_ attributes since those are used for internal purposes
        list_of_all = parser.get_all_possible_attr(resolving_name)
        reduced_list = []
        for item in list_of_all:
            if not item[0].startswith('inf_'):
                reduced_list.append(item)

        # Respond to the client.
        response_completion = json.dumps({'options': reduced_list})
        host.respond(bytes(response_completion, 'UTF-8'), client_address)
    except Exception:
        traceback.print_exc()
        # Send an empty list if any error occurred
        list_for_completion = []
        response_completion_error = json.dumps(
            {'options': list_for_completion})
        host.respond(bytes(response_completion_error, 'UTF-8'), client_address)
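The core trick here is rewriting the current line so the expression in front of the '.' gets bound to a known name that the evaluator can later look up. A rough illustration with a hardcoded stand-in for what utils.getObjectStringFromLine would extract:

buffer = [
    "import os",
    "p = os.path",
    "        p.",                 # the user just typed '.' on this line
]
line_number = 3
variable_line = buffer[line_number - 1].replace('\t', '    ')
leading_spaces = len(variable_line) - len(variable_line.lstrip())

line_modified = "p"               # stand-in for utils.getObjectStringFromLine(variable_line)
resolving_name = 'target_for_completion'

buffer[line_number - 1] = variable_line[0:leading_spaces] + resolving_name + ' = ' + line_modified
buffer = buffer[0:line_number]
print(buffer[-1])                 # "        target_for_completion = p"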
Example #4
0
def parse_and_validate(host, dictionaryID, client_address,
                       number_of_iterations):
    """
    Main method which evaluates whole code and sends respond with errors and warnings.
    """
    try:
        start_time = time.time() * 1000
        log_to_file("START OF VALIDATION: " + str(start_time) +
                    ", Number of iterations: " + str(number_of_iterations))

        buffer = openBuffers[dictionaryID]

        problems_list = []
        warnings_list = []

        # Decode the data
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)

        #for token in lexer:
        #    print(token.value)

        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)
        #log_time("AFTER PARSE DATA: ", time.time() * 1000, start_time)

        tree, del_parts = utils.traverse_ast_test(res)
        #log_time("AFTER TRAVERSE AST: ", time.time() * 1000, start_time)

        ast_tree = utils.parse_with_ast(res)
        #log_time("AFTER PARSE WITH AST: ", time.time() * 1000, start_time)

        parser = FinalParser(number_of_iterations)
        parser.eval_in_root(ast_tree)
        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0], delPart[1] + 1):
                del_lines.append(i)

        #log_time("AFTER EVAL IN ROOT: ", time.time() * 1000, start_time)

        #processing syntax problems
        for line in del_lines:
            p = []
            p.append(line)
            p.append('Invalid syntax.')
            problems_list.append(p)

        del_lines += res.emptyLinesNums
        del_lines = sorted(set(del_lines))

        list_of_used_lines = []

        #Problems
        for problem in parser.problems:
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(
                    problem.node.lineno, del_lines)
                problem.node.processed = 1

            if not (problem.node.lineno in list_of_used_lines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                problems_list.append(b)
                list_of_used_lines.append(problem.node.lineno)

        #Warnings
        for warning in parser.warnings:
            if not hasattr(warning.node, 'processed'):
                warning.node.lineno = utils.getOriginLineNum(
                    warning.node.lineno, del_lines)
                warning.node.processed = 1
            w = []
            w.append(warning.node.lineno)
            w.append(str(warning))
            warnings_list.append(w)

        log_to_file("Problems: " + str(problems_list))
        log_to_file("Warnings: " + str(warnings_list))
        log_to_file("Validation completed...")

        problems = json.dumps({
            'problems': problems_list,
            'warnings': warnings_list
        })
        #print("DUMPED THING: "+str(problems))
        host.respond(bytes(problems, "UTF-8"), client_address)

        host.validationRunning = False
        log_to_file('----------------------------')

    except IndentationError as error:
        log_to_file("Indentation error in parsing.")
        traceback.print_exc()

        indent_error = json.dumps({
            'message': error.msg,
            'line_number': error.lineno
        })
        host.respond(bytes(indent_error, "UTF-8"), client_address)

        host.validationRunning = False
    except python_parser.RobustParserError as error:
        log_to_file("Error in parsing: returning correct line number.")

        b = []
        b.append(error.data.lineno)
        b.append("invalid syntax")
        problems_list.append(b)

        problems = json.dumps({
            'problems': problems_list,
            'warnings': warnings_list
        })
        host.respond(bytes(problems, "UTF-8"), client_address)
        host.validationRunning = False
    except Exception as error:
        log_to_file("Error in parsing: ")
        traceback.print_exc()
        #connection.sendall(bytes("endOfValidation: "+error, "utf-8"))
        #host.respond(bytes("endOfValidation", "utf-8"), client_address)
        error_problems_response = json.dumps({'problems': [], 'warnings': []})
        host.respond(bytes(error_problems_response, "UTF-8"), client_address)
        host.validationRunning = False
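The editor-side client only has to decode the JSON payload built above. A small sketch of the expected response shape, with hypothetical entries:

import json

problems = json.dumps({
    'problems': [[3, 'Invalid syntax.'], [7, "Undefined variable 'foo'"]],
    'warnings': [[12, "Unused variable 'bar'"]],
})

decoded = json.loads(problems)
for line, message in decoded['problems']:
    print("problem at line", line, ":", message)
for line, message in decoded['warnings']:
    print("warning at line", line, ":", message)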
Example #5
0
def get_auto_completion(host, client_address, buffer_used, variable_line, line_number):
    '''
    This method is called when the user presses the '.' symbol.

    We want to find all possible attributes for the given symbol in the given buffer at the given line.

    Call eval_in_root and pass nodeAst, which means evaluation will stop once the given node is reached.
    '''
    try:
        #Get leading spaces so the indent matches current level
        variable_line = variable_line.replace('\t', '    ')
        leading_spaces = len(variable_line) - len(variable_line.lstrip())
        
        # Get only relevant part of line
        line_modified = utils.getObjectStringFromLine(variable_line)
        
        # Replace given line with string which will be resolved - much easier for our solution
        resolving_name = 'target_for_completion'
        buffer = buffer_used
        buffer[line_number - 1] = variable_line[0:leading_spaces] + resolving_name + ' = ' + line_modified
        buffer = buffer[0:line_number]
        
        # Parse modified buffer and eval 
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data,lexer)
        
        tree, del_parts = utils.traverse_ast_test(res)
        ast_tree = utils.parse_with_ast(res)  
#        print("TREE:"+str(utils.astNode_to_tree(ast_tree)))

        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0],delPart[1]+1):
                del_lines.append(i) 
        
        del_lines += res.emptyLinesNums
        del_lines = sorted(set(del_lines))
        
        current_line_number = utils.getCurrentLineNum(line_number, del_lines)
        parser = FinalParser(1)
        parser.eval_in_root(ast_tree)
        
#        print(str(parser.scopes[0]))
        
        # Remove inf_ attributes since those are used for internal purposes
        list_of_all = parser.get_all_possible_attr(resolving_name)
        reduced_list = []
        for item in list_of_all:
            if not item[0].startswith('inf_'):
                reduced_list.append(item)
        
        # Respond to the client.
        response_completion = json.dumps({'options' : reduced_list})
        host.respond(bytes(response_completion, 'UTF-8'), client_address)
    except Exception:
        traceback.print_exc()
        # Send an empty list if any error occurred
        list_for_completion = []
        response_completion_error = json.dumps({'options' : list_for_completion})
        host.respond(bytes(response_completion_error, 'UTF-8'), client_address)
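utils.getCurrentLineNum and utils.getOriginLineNum are not shown in these examples; conceptually they translate line numbers between the stripped buffer (invalid and empty lines removed) and the original buffer. A possible sketch of that mapping, offered only as an assumption about their behaviour:

def origin_line_num(stripped_lineno, del_lines):
    # Shift a stripped-buffer line number back to the original buffer by
    # accounting for every deleted line at or before it (del_lines is sorted).
    lineno = stripped_lineno
    for deleted in del_lines:
        if deleted <= lineno:
            lineno += 1
    return lineno

# Lines 2 and 4 were removed before parsing, so stripped line 3 maps to original line 5.
print(origin_line_num(3, [2, 4]))   # 5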
Example #6
0
def parse_and_validate(host, dictionaryID, client_address, number_of_iterations):
    """
    Main method which evaluates whole code and sends respond with errors and warnings.
    """
    try:
        start_time = time.time() * 1000
        log_to_file("START OF VALIDATION: "+str(start_time)+", Number of iterations: "+str(number_of_iterations))
        
        buffer = openBuffers[dictionaryID]
        
        problems_list = []
        warnings_list = []        
        
        # Decode the data
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        
        #for token in lexer:
        #    print(token.value)
            
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data,lexer)
        #log_time("AFTER PARSE DATA: ", time.time() * 1000, start_time)
        
        tree, del_parts = utils.traverse_ast_test(res)       
        #log_time("AFTER TRAVERSE AST: ", time.time() * 1000, start_time)  
        
        ast_tree = utils.parse_with_ast(res)   
        #log_time("AFTER PARSE WITH AST: ", time.time() * 1000, start_time)
        
        parser = FinalParser(number_of_iterations)
        parser.eval_in_root(ast_tree)
        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0],delPart[1]+1):
                del_lines.append(i)
                      
        #log_time("AFTER EVAL IN ROOT: ", time.time() * 1000, start_time)
        
        #processing syntax problems
        for line in del_lines:
            p = []
            p.append(line)
            p.append('Invalid syntax.')
            problems_list.append(p)
            
        del_lines += res.emptyLinesNums
        del_lines = sorted(set(del_lines))
        
        list_of_used_lines = []
        
        #Problems
        for problem in parser.problems:   
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(problem.node.lineno, del_lines)
                problem.node.processed = 1

            if not (problem.node.lineno in list_of_used_lines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                problems_list.append(b)
                list_of_used_lines.append(problem.node.lineno)
                
        #Warnings
        for warning in parser.warnings:
            if not hasattr(warning.node, 'processed'):
                warning.node.lineno = utils.getOriginLineNum(warning.node.lineno, del_lines)
                warning.node.processed = 1
            w = []
            w.append(warning.node.lineno)
            w.append(str(warning))
            warnings_list.append(w)
        
        log_to_file("Problems: "+str(problems_list))
        log_to_file("Warnings: "+str(warnings_list))
        log_to_file("Validation completed...")

        problems = json.dumps({'problems' : problems_list, 'warnings' : warnings_list})
        #print("DUMPED THING: "+str(problems))
        host.respond(bytes(problems, "UTF-8"), client_address)
        
        
        host.validationRunning = False
        log_to_file('----------------------------')
        
    except IndentationError as error:
        log_to_file("Indentation error in parsing.")
        traceback.print_exc()
        
        indent_error = json.dumps({'message' : error.msg, 'line_number' : error.lineno})
        host.respond(bytes(indent_error,"UTF-8"), client_address)
        
        host.validationRunning = False
    except python_parser.RobustParserError as error:
        log_to_file("Error in parsing: returning correct line number.")
        
        b = []
        b.append(error.data.lineno)
        b.append("invalid syntax")
        problems_list.append(b)
        
        problems = json.dumps({'problems' : problems_list, 'warnings' : warnings_list})
        host.respond(bytes(problems, "UTF-8"), client_address)
        host.validationRunning = False
    except Exception as error:
        log_to_file("Error in parsing: ")
        traceback.print_exc()
        #connection.sendall(bytes("endOfValidation: "+error, "utf-8"))
        #host.respond(bytes("endOfValidation", "utf-8"), client_address)
        error_problems_response = json.dumps({'problems' : [], 'warnings' : []})
        host.respond(bytes(error_problems_response, "UTF-8"), client_address)
        host.validationRunning = False
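IndentationError is a SyntaxError subclass, so it carries .msg and .lineno, which is exactly what the handler above packs into the JSON response. A standalone illustration using compile() instead of the project's parser:

import json

try:
    compile("def f():\nreturn 1\n", "<buffer>", "exec")
except IndentationError as error:
    indent_error = json.dumps({
        'message': error.msg,
        'line_number': error.lineno,
    })
    print(indent_error)   # e.g. {"message": "expected an indented block", "line_number": 2}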