Example #1
0
    def test_lineNums(self):
        delParts = [5, 6, 7, 8, 9, 13, 14, 15, 16, 17]
        num = utils.getOriginLineNum(6, delParts)
        num1 = utils.getOriginLineNum(10, delParts)
        self.assertEqual(num, 11)
        self.assertEqual(num1, 20)
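The test above pins down the contract of utils.getOriginLineNum: given a line number in the stripped-down source and the list of deleted original lines, it returns the matching line number in the original file (with lines 5-9 and 13-17 deleted, stripped line 6 maps back to original line 11 and stripped line 10 to original line 20). Below is a minimal sketch of a function with that behaviour, assuming delParts is a flat list of deleted original line numbers; get_origin_line_num is a hypothetical stand-in, not the project's actual implementation.

def get_origin_line_num(stripped_lineno, deleted_lines):
    # Hypothetical sketch: walk the original line numbering, skipping deleted
    # lines, until `stripped_lineno` surviving lines have been consumed; that
    # original number is the answer.  With deleted lines 5-9 and 13-17,
    # stripped line 6 ends up at original line 11, matching the test above.
    deleted = set(deleted_lines)
    origin = 0
    remaining = stripped_lineno
    while remaining > 0:
        origin += 1
        if origin not in deleted:
            remaining -= 1
    return origin

Counting surviving lines one by one keeps the mapping correct even when several deleted ranges sit between the kept lines.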
Example #2
0
def parseAndValidate(connection):
    try:
        arrayOfProblems = []
        # Decode the data
        data = bufferToString(buffer)
        print("DATA: ", data)
    
        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        #for token in lexer:
        #    print(token.value)

        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)
    
        tree, delParts = utils.traverse_ast_test(res)

        try:
            ast_tree = utils.parse_with_ast(res)
            #print(utils.astNode_to_tree(ast_tree))
        except Exception as error:
            print("Error in the part that should be valid: ", error)
            # Re-raise so the outer handler reports the failure instead of
            # continuing with an undefined ast_tree.
            raise

        parser = Parser()
        module_scope = parser.eval_code(ast_tree)
        delLines = []
        for delPart in delParts:
            # Expand each deleted (start, end) range into individual line numbers.
            for i in range(delPart[0], delPart[1] + 1):
                delLines.append(i)
        
        # Processing syntax problems
        for line in delLines:
            p = []
            p.append(line)
            p.append('invalid syntax')
            arrayOfProblems.append(p)

        delLines += res.emptyLinesNums
        temp = {line for line in delLines}
        delLines = [line for line in temp]
        delLines.sort()
        print("DEL LINES:", delLines)
        
        
        problem_symbols = {problem for problem in parser.problems}

        listOfLines = []
        for problem in problem_symbols:
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(problem.node.lineno, delLines)
                problem.node.processed = 1
            print('P: ', problem, 'at line: ', problem.node.lineno)
            if not (problem.node.lineno in listOfLines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                arrayOfProblems.append(b)
                listOfLines.append(problem.node.lineno)
        
        pickleProblems = pickleListOfProblems(arrayOfProblems)
        pickleProblemsStr = pickle.dumps(pickleProblems, 2)
        connection.sendall(pickleProblemsStr)
        return (ast_tree, delLines)
    except Exception:
        traceback.print_exc()
        #sock.close()
        connection.sendall(bytes("endOfValidation", "utf-8"))
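parseAndValidate answers over a raw socket: on success it pickles the result of pickleListOfProblems with protocol 2 and writes the bytes with sendall, and on failure it sends the literal string "endOfValidation". A minimal client-side sketch of consuming such a reply follows, assuming the server closes the connection once it has responded (the excerpt above does not show that) and with receive_validation_result as a hypothetical helper name.

import pickle

def receive_validation_result(sock):
    # Sketch under the stated assumptions: read until the peer closes the
    # connection, then decode the payload.
    chunks = []
    while True:
        chunk = sock.recv(4096)
        if not chunk:
            break
        chunks.append(chunk)
    payload = b"".join(chunks)
    if payload == b"endOfValidation":
        # The server hit an exception and sent its error sentinel instead.
        return None
    # Otherwise the payload is whatever pickleListOfProblems produced.
    return pickle.loads(payload)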
Example #3
0
    def test_lineNums(self):
        delParts = [5, 6, 7, 8, 9, 13, 14, 15, 16, 17]
        num = utils.getOriginLineNum(6, delParts)
        num1 = utils.getOriginLineNum(10, delParts)
        self.assertEqual(num, 11)
        self.assertEqual(num1, 20)
Example #4
0
def parse_and_validate(host, dictionaryID, client_address,
                       number_of_iterations):
    """
    Main method that evaluates the whole code and sends a response with errors and warnings.
    """
    try:
        start_time = time.time() * 1000
        log_to_file("START OF VALIDATION: " + str(start_time) +
                    ", Number of iterations: " + str(number_of_iterations))

        buffer = openBuffers[dictionaryID]

        problems_list = []
        warnings_list = []

        # Decode the data
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)

        #for token in lexer:
        #    print(token.value)

        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)
        #log_time("AFTER PARSE DATA: ", time.time() * 1000, start_time)

        tree, del_parts = utils.traverse_ast_test(res)
        #log_time("AFTER TRAVERSE AST: ", time.time() * 1000, start_time)

        ast_tree = utils.parse_with_ast(res)
        #log_time("AFTER PARSE WITH AST: ", time.time() * 1000, start_time)

        parser = FinalParser(number_of_iterations)
        parser.eval_in_root(ast_tree)
        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0], delPart[1] + 1):
                del_lines.append(i)

        #log_time("AFTER EVAL IN ROOT: ", time.time() * 1000, start_time)

        #processing syntax problems
        for line in del_lines:
            p = []
            p.append(line)
            p.append('Invalid syntax.')
            problems_list.append(p)

        del_lines += res.emptyLinesNums
        temp = {line for line in del_lines}
        del_lines = [line for line in temp]
        del_lines.sort()

        list_of_used_lines = []

        #Problems
        for problem in parser.problems:
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(
                    problem.node.lineno, del_lines)
                problem.node.processed = 1

            if not (problem.node.lineno in list_of_used_lines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                problems_list.append(b)
                list_of_used_lines.append(problem.node.lineno)

        #Warnings
        for warning in parser.warnings:
            if not hasattr(warning.node, 'processed'):
                warning.node.lineno = utils.getOriginLineNum(
                    warning.node.lineno, del_lines)
                warning.node.processed = 1
            w = []
            w.append(warning.node.lineno)
            w.append(str(warning))
            warnings_list.append(w)

        log_to_file("Problems: " + str(problems_list))
        log_to_file("Warnings: " + str(warnings_list))
        log_to_file("Validation completed...")

        problems = json.dumps({
            'problems': problems_list,
            'warnings': warnings_list
        })
        #print("DUMPED THING: "+str(problems))
        host.respond(bytes(problems, "UTF-8"), client_address)

        host.validationRunning = False
        log_to_file('----------------------------')

    except IndentationError as error:
        log_to_file("Indentation error in parsing.")
        traceback.print_exc()

        indent_error = json.dumps({
            'message': error.msg,
            'line_number': error.lineno
        })
        host.respond(bytes(indent_error, "UTF-8"), client_address)

        host.validationRunning = False
    except python_parser.RobustParserError as error:
        log_to_file("Error in parsing: returning correct line number.")

        b = []
        b.append(error.data.lineno)
        b.append("invalid syntax")
        problems_list.append(b)

        problems = json.dumps({
            'problems': problems_list,
            'warnings': warnings_list
        })
        host.respond(bytes(problems, "UTF-8"), client_address)
        host.validationRunning = False
    except Exception as error:
        log_to_file("Error in parsing: ")
        traceback.print_exc()
        #connection.sendall(bytes("endOfValidation: "+error, "utf-8"))
        #host.respond(bytes("endOfValidation", "utf-8"), client_address)
        error_problems_response = json.dumps({'problems': [], 'warnings': []})
        host.respond(bytes(error_problems_response, "UTF-8"), client_address)
        host.validationRunning = False
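Unlike the pickle-based variant, this version responds with UTF-8 JSON: an object whose 'problems' and 'warnings' keys each hold a list of [line_number, message] pairs (the IndentationError branch sends 'message' and 'line_number' instead). A small sketch of unpacking the regular case on the client side follows, assuming the raw bytes are already in hand; decode_validation_response is a hypothetical helper, not part of the project.

import json

def decode_validation_response(raw_bytes):
    # Turn the UTF-8 JSON payload produced by parse_and_validate into two
    # {line_number: message} dictionaries, one for problems, one for warnings.
    # If the same line appears twice, the later message overwrites the earlier one.
    payload = json.loads(raw_bytes.decode("utf-8"))
    problems = {line: message for line, message in payload.get("problems", [])}
    warnings = {line: message for line, message in payload.get("warnings", [])}
    return problems, warnings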
Example #5
0
def parse_and_validate(host, dictionaryID, client_address, number_of_iterations):
    """
    Main method that evaluates the whole code and sends a response with errors and warnings.
    """
    try:
        start_time = time.time() * 1000
        log_to_file("START OF VALIDATION: "+str(start_time)+", Number of iterations: "+str(number_of_iterations))
        
        buffer = openBuffers[dictionaryID]
        
        problems_list = []
        warnings_list = []        
        
        # Decode the data
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        
        #for token in lexer:
        #    print(token.value)
            
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)
        #log_time("AFTER PARSE DATA: ", time.time() * 1000, start_time)
        
        tree, del_parts = utils.traverse_ast_test(res)       
        #log_time("AFTER TRAVERSE AST: ", time.time() * 1000, start_time)  
        
        ast_tree = utils.parse_with_ast(res)   
        #log_time("AFTER PARSE WITH AST: ", time.time() * 1000, start_time)
        
        parser = FinalParser(number_of_iterations)
        parser.eval_in_root(ast_tree)
        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0], delPart[1] + 1):
                del_lines.append(i)
                      
        #log_time("AFTER EVAL IN ROOT: ", time.time() * 1000, start_time)
        
        #processing syntax problems
        for line in del_lines:
            p = []
            p.append(line)
            p.append('Invalid syntax.')
            problems_list.append(p)
            
        del_lines += res.emptyLinesNums
        temp = {line for line in del_lines}
        del_lines = [line for line in temp]
        del_lines.sort()
        
        list_of_used_lines = []
        
        #Problems
        for problem in parser.problems:   
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(problem.node.lineno, del_lines)
                problem.node.processed = 1

            if not (problem.node.lineno in list_of_used_lines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                problems_list.append(b)
                list_of_used_lines.append(problem.node.lineno)
                
        #Warnings
        for warning in parser.warnings:
            if not hasattr(warning.node, 'processed'):
                warning.node.lineno = utils.getOriginLineNum(warning.node.lineno, del_lines)
                warning.node.processed = 1
            w = []
            w.append(warning.node.lineno)
            w.append(str(warning))
            warnings_list.append(w)
        
        log_to_file("Problems: "+str(problems_list))
        log_to_file("Warnings: "+str(warnings_list))
        log_to_file("Validation completed...")

        problems = json.dumps({'problems': problems_list, 'warnings': warnings_list})
        #print("DUMPED THING: "+str(problems))
        host.respond(bytes(problems, "UTF-8"), client_address)
        
        
        host.validationRunning = False
        log_to_file('----------------------------')
        
    except IndentationError as error:
        log_to_file("Indentation error in parsing.")
        traceback.print_exc()
        
        indent_error = json.dumps({'message': error.msg, 'line_number': error.lineno})
        host.respond(bytes(indent_error, "UTF-8"), client_address)
        
        host.validationRunning = False
    except python_parser.RobustParserError as error:
        log_to_file("Error in parsing: returning correct line number.")
        
        b = []
        b.append(error.data.lineno)
        b.append("invalid syntax")
        problems_list.append(b)
        
        problems = json.dumps({'problems': problems_list, 'warnings': warnings_list})
        host.respond(bytes(problems, "UTF-8"), client_address)
        host.validationRunning = False
    except Exception as error:
        log_to_file("Error in parsing: ")
        traceback.print_exc()
        #connection.sendall(bytes("endOfValidation: "+error, "utf-8"))
        #host.respond(bytes("endOfValidation", "utf-8"), client_address)
        error_problems_response = json.dumps({'problems': [], 'warnings': []})
        host.respond(bytes(error_problems_response, "UTF-8"), client_address)
        host.validationRunning = False