Code example #1
0
File: daemon.py  Project: pynfer/pynfer
def update_given_file_only(current_list, path, rootDirectory, file, given_buffer_data):
    '''
    Re-parse ``given_buffer_data`` (the current, possibly edited buffer of
    ``file``) and merge the definitions found there into ``current_list``.

    Unknown names get a fresh entry; an already-tracked name has its entry
    replaced, with this file's location appended to the stored location
    string unless it is already recorded there.
    '''
    results = current_list

    source = buffer_to_string(given_buffer_data)
    buf_lexer = python_lexer.PythonLexer()
    buf_lexer.input(source)

    parsed = python_parser.parse_data(source, buf_lexer)
    definitions = utils.node_to_defs(parsed)

    if definitions is None:
        return results

    # Location of this file relative to the project root.
    relative = path[path.rfind(rootDirectory):]
    location = str(relative) + '/' + str(file)

    for item in definitions:
        existing = findDictByKeyInListOfDicts(item[0], results)
        if existing is None:
            results.append({item[0]: (item[1], location)})
        else:
            # Replace the stale entry, keeping previously seen locations.
            results.remove(existing)
            previous = existing[item[0]]
            if location not in previous[1]:
                results.append({item[0]: (item[1], previous[1] + ',' + location)})
            else:
                results.append({item[0]: (item[1], previous[1])})
    return results
Code example #2
0
File: Server.py  Project: kapucko/dipl
def parse():
    """Parse the module-level ``buffer`` and return ``(ast_tree, delLines)``.

    ``ast_tree`` is the AST of the syntactically valid part of the buffer
    (``None`` when even the cleaned-up source cannot be parsed) and
    ``delLines`` is a sorted, de-duplicated list of line numbers that were
    removed (invalid-syntax regions plus empty lines).

    NOTE(review): relies on module globals ``buffer`` and ``bufferToString``
    — confirm they are defined in this module.
    """
    data = bufferToString(buffer)
    print("DATA: ", data)

    # The first lexer only tokenizes; the parser gets its own fresh lexer.
    lexer = python_lexer.PythonLexer()
    lexer.input(data)
    lexer = python_lexer.PythonLexer()
    res = python_parser.parse_data(data, lexer)

    # delParts holds (start, end) line ranges stripped out as invalid syntax.
    tree, delParts = utils.traverse_ast_test(res)

    # Bug fix: ast_tree was previously unbound when parse_with_ast raised,
    # so the final return line crashed with NameError.  Default it to None.
    ast_tree = None
    try:
        ast_tree = utils.parse_with_ast(res)
    except Exception as error:
        print("Error in part which that be valid: ", error)

    # Expand the removed ranges into individual line numbers.
    delLines = []
    for delPart in delParts:
        delLines.extend(range(delPart[0], delPart[1] + 1))

    # Add empty lines, then de-duplicate and sort.
    delLines += res.emptyLinesNums
    delLines = sorted(set(delLines))
    return (ast_tree, delLines)
Code example #3
0
def iterate_all_on_given_path(path, rootDirectory, extension='.py'):
    '''
    Recursively collect every definition found in files ending with
    ``extension`` under ``path``.

    Returns a list of single-entry dicts ``{name: (info, locations)}`` where
    ``locations`` is a comma-separated list of ``<relative path>/<file>``
    strings, relative to ``rootDirectory``.
    '''
    finalRes = []
    files = []
    subdirs = []
    # Only the top level of os.walk() is consumed; recursion below handles
    # the subdirectories.
    for (dirpath, dirnames, filenames) in os.walk(path):
        files.extend(filenames)
        subdirs.extend(dirnames)
        break

    # Recursively call this function on all subdirectories.
    for subdir in subdirs:
        finalRes.extend(
            iterate_all_on_given_path(path + '/' + str(subdir), rootDirectory))

    # Iterate all matching files directly under ``path``.
    for file in files:
        if str(file).endswith(extension):
            try:
                # Bug fix: the file handle was never closed; ``with``
                # releases it even when parsing below fails.
                with open(path + '/' + str(file)) as myfile:
                    data = myfile.read()

                lexer = python_lexer.PythonLexer()
                lexer.input(data)

                res = python_parser.parse_data(data, lexer)
                defAndClass = utils.node_to_defs(res)

                if defAndClass is not None:
                    curLocation = path[path.rfind(rootDirectory):]
                    location = str(curLocation) + '/' + str(file)
                    for item in defAndClass:
                        checkExistence = findDictByKeyInListOfDicts(
                            item[0], finalRes)
                        if checkExistence is None:
                            finalRes.append({item[0]: (item[1], location)})
                        else:
                            # Same name seen before: merge the locations.
                            finalRes.remove(checkExistence)
                            oldValue = checkExistence[item[0]]
                            newValue = (item[1], oldValue[1] + ',' + location)
                            finalRes.append({item[0]: newValue})
            except Exception:
                # Best effort: files that cannot be read/parsed are skipped.
                pass
    return finalRes
Code example #4
0
File: daemon.py  Project: pynfer/pynfer
def iterate_all_on_given_path(path, rootDirectory, extension='.py'):
    '''
    Walk ``path`` recursively and collect every definition found in files
    ending with ``extension``.

    Returns a list of single-entry dicts ``{name: (info, locations)}`` where
    ``locations`` is a comma-separated list of ``<relative path>/<file>``
    strings (relative to ``rootDirectory``).
    '''
    finalRes = []
    f = []
    d = []
    # Only the first os.walk() result is used; subdirectories are handled
    # by the recursive calls below.
    for (dirpath, dirnames, filenames) in os.walk(path):
        f.extend(filenames)
        d.extend(dirnames)
        break

    # Recursively call this function on all subdirectories.
    for dir in d:
        recursiveOutput = iterate_all_on_given_path(path + '/' + str(dir),
                                                    rootDirectory)
        finalRes.extend(recursiveOutput)

    # Iterate all matching files in f.
    for file in f:
        if str(file).endswith(extension):
            try:
                # Bug fix: the original never closed the file handle;
                # ``with`` guarantees release even when parsing fails.
                with open(path + '/' + str(file)) as myfile:
                    data = myfile.read()

                lexer = python_lexer.PythonLexer()
                lexer.input(data)

                res = python_parser.parse_data(data, lexer)
                defAndClass = utils.node_to_defs(res)

                if defAndClass is not None:
                    curLocation = path[path.rfind(rootDirectory):]
                    location = str(curLocation) + '/' + str(file)
                    for item in defAndClass:
                        checkExistence = findDictByKeyInListOfDicts(item[0],
                                                                    finalRes)
                        if checkExistence is None:
                            finalRes.append({item[0]: (item[1], location)})
                        else:
                            # Merge locations of a name seen before.
                            finalRes.remove(checkExistence)
                            oldValue = checkExistence[item[0]]
                            finalRes.append(
                                {item[0]: (item[1], oldValue[1] + ',' + location)})
            except Exception:
                # Best effort: skip files that cannot be read or parsed.
                pass
    return finalRes
Code example #5
0
File: test_parsing_package.py  Project: kapucko/dipl
def setup_test(test, filename):
    """Parse ``inf/parsing/<filename>`` and attach the results to *test*.

    Sets ``test.gl``/``test.lc`` (fresh global/local scope dicts),
    ``test.code`` (the source reconstructed from the parse tree) and
    ``test.node`` (the parse-tree root).
    """
    print("--------------------", test)
    # ``with`` closes the file even if read() raises; the original leaked
    # the handle on error.
    with open('inf/parsing/' + filename) as f:
        data = f.read()
    gl = {}
    lc = {}
    lexer = python_lexer.PythonLexer()
    lexer.input(data)
    res = parser.parse_data(data, lexer)
    code = utils.node_to_str(res)
    test.gl = gl
    test.lc = lc
    test.code = code
    test.node = res
Code example #6
0
def setup_test(test, filename):
    """Parse ``inf/parsing/<filename>`` and attach the parse artefacts to
    *test*: fresh ``gl``/``lc`` scope dicts, the reconstructed source in
    ``test.code`` and the parse-tree root in ``test.node``.
    """
    print("--------------------", test)
    # ``with`` guarantees the handle is closed even if read() raises; the
    # original leaked it on error.
    with open('inf/parsing/' + filename) as f:
        data = f.read()
    gl = {}
    lc = {}
    lexer = python_lexer.PythonLexer()
    lexer.input(data)
    res = parser.parse_data(data, lexer)

    code = utils.node_to_str(res)
    # Dropped the unused ``anotherTest = utils.node_to_defs(res)`` call —
    # its result was never read.  (Restore if node_to_defs has side effects.)
    test.gl = gl
    test.lc = lc
    test.code = code
    test.node = res
Code example #7
0
def update_given_file_only(current_list, path, rootDirectory, file,
                           given_buffer_data):
    '''
    Parse ``given_buffer_data`` (the live, possibly edited buffer of
    ``file``) and fold the definitions it contains into ``current_list``.

    A name that is not yet tracked gets a new entry; a tracked name has its
    entry replaced, with this file's location appended to the stored
    location string when not already present.
    '''
    merged = current_list

    text = buffer_to_string(given_buffer_data)
    lex = python_lexer.PythonLexer()
    lex.input(text)

    node = python_parser.parse_data(text, lex)
    found = utils.node_to_defs(node)

    if found is not None:
        # This file's location relative to the project root.
        rel = path[path.rfind(rootDirectory):]
        file_location = str(rel) + '/' + str(file)
        for entry in found:
            known = findDictByKeyInListOfDicts(entry[0], merged)
            if known is None:
                merged.append({entry[0]: (entry[1], file_location)})
                continue
            # Replace the stale entry, preserving known locations.
            merged.remove(known)
            prior = known[entry[0]]
            locations = prior[1]
            if file_location not in locations:
                locations = locations + ',' + file_location
            merged.append({entry[0]: (entry[1], locations)})
    return merged
Code example #8
0
File: Server.py  Project: kapucko/dipl
def parseAndValidate(connection):
    """Parse the module-level ``buffer``, evaluate it, and send the detected
    problems back over ``connection`` as a pickled list of
    ``[line_number, message]`` pairs.

    Returns ``(ast_tree, delLines)`` on success.  On any failure the
    traceback is printed and the literal marker ``"endOfValidation"`` is
    sent instead.

    NOTE(review): relies on module globals ``buffer``, ``bufferToString``,
    ``Parser`` and ``pickleListOfProblems`` — confirm they exist in this
    module.  If ``utils.parse_with_ast`` raises, ``ast_tree`` stays unbound
    and the later ``eval_code`` call raises NameError, which is swallowed
    by the outer ``except``.
    """
    try:
        arrayOfProblems = None
        arrayOfProblems = []
        # Decode the data
        #data = anArg.decode("utf-8")
        data = bufferToString(buffer)
        print("DATA: ",data)
        #print("TOTO VALIDUJEM: "+ anArg)
        #data = anArg

        lexer = python_lexer.PythonLexer()
        lexer.input(data)
        #for token in lexer:
            #print(token.value)
        # A fresh, un-fed lexer is handed to the parser.
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data,lexer)

        # delParts holds (start, end) line ranges stripped as invalid syntax.
        tree, delParts = utils.traverse_ast_test(res)

        try:
            ast_tree = utils.parse_with_ast(res)
            #print(utils.astNode_to_tree(ast_tree))
        except Exception as error:
            print("Error in part which that be valid: ",error)

        parser=Parser()
        module_scope=parser.eval_code(ast_tree)
        # Expand the removed ranges into individual line numbers.
        delLines = []
        for delPart in delParts:
            for i in range(delPart[0],delPart[1]+1):
                # here is the stuff (translated from Slovak "tuto su veci")
                delLines.append(i)

        #processing syntax problems
        for line in delLines:
            p = []
            p.append(line)
            p.append('invalid syntax')
            arrayOfProblems.append(p)

        # Add empty lines, then de-duplicate and sort.
        delLines+=res.emptyLinesNums
        temp={line for line in delLines}
        delLines = [line for line in temp]
        delLines.sort()
        print("DEL LINES:", delLines)


        problem_symbols={problem for problem in parser.problems}

        listOfLines = []
        for problem in problem_symbols:
            # Translate the line number back to the original buffer once;
            # the 'processed' marker prevents double translation.
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno=utils.getOriginLineNum(problem.node.lineno,delLines)
                problem.node.processed=1
            print('P: ', problem,'at line: ',problem.node.lineno)
            # Report at most one problem per line.
            if not (problem.node.lineno in listOfLines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                arrayOfProblems.append(b)
                listOfLines.append(problem.node.lineno)

        #pickleProblems = pickleListOfProblems(arrayOfProblems)
        pickleProblems = pickleListOfProblems(arrayOfProblems)
        # Protocol 2 keeps the payload readable by older Python clients.
        pickleProblemsStr = pickle.dumps(pickleProblems, 2)
        connection.sendall(pickleProblemsStr)
        return (ast_tree, delLines)
    except:
        traceback.print_exc()
        #sock.close()
        connection.sendall(bytes("endOfValidation", "utf-8"))
Code example #9
0
def get_auto_completion(host, client_address, buffer_used, variable_line,
                        line_number):
    '''
    This method is called when user presses '.' symbol.

    We want to find all possible attributes for given symbol on given buffer
    at given line: the current line is rewritten into
    ``target_for_completion = <expr>``, the truncated buffer is parsed and
    evaluated, and the client receives a JSON list of completion options
    (an empty list on any error).

    NOTE(review): ``buffer_used`` is modified in place (the current line is
    replaced) — confirm callers do not reuse the buffer afterwards.
    '''
    try:
        #Get leading spaces so the indent matches current level
        variable_line = variable_line.replace('\t', '    ')
        leading_spaces = len(variable_line) - len(variable_line.lstrip())

        # Get only relevant part of line
        line_modified = utils.getObjectStringFromLine(variable_line)

        # Replace given line with string which will be resolved - much easier
        # for our solution - then drop everything below it.
        resolving_name = 'target_for_completion'
        buffer = buffer_used
        buffer[line_number - 1] = variable_line[
            0:leading_spaces] + resolving_name + ' = ' + line_modified
        buffer = buffer[0:line_number]

        # Parse modified buffer and eval.  (Bug fix: the original built an
        # extra lexer, fed it the data and immediately discarded it; the
        # parser receives its own fresh lexer, as elsewhere in this file.)
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)

        tree, del_parts = utils.traverse_ast_test(res)
        ast_tree = utils.parse_with_ast(res)

        # Line numbers removed as invalid syntax or empty lines.
        del_lines = []
        for delPart in del_parts:
            del_lines.extend(range(delPart[0], delPart[1] + 1))
        del_lines += res.emptyLinesNums
        del_lines = sorted(set(del_lines))
        # (The unused ``current_line_number`` computation was dropped.)

        parser = FinalParser(1)
        parser.eval_in_root(ast_tree)

        #Remove inf_ attribtues since those are used for internal purposes
        reduced_list = [
            item for item in parser.get_all_possible_attr(resolving_name)
            if not item[0].startswith('inf_')
        ]

        # Respond to the client.
        response_completion = json.dumps({'options': reduced_list})
        host.respond(bytes(response_completion, 'UTF-8'), client_address)
    except Exception:
        traceback.print_exc()
        # Send an empty list if any error occurred
        response_completion_error = json.dumps({'options': []})
        host.respond(bytes(response_completion_error, 'UTF-8'), client_address)
Code example #10
0
def parse_and_validate(host, dictionaryID, client_address,
                       number_of_iterations):
    """
    Main method which evaluates whole code and sends respond with errors and warnings.

    Parses the buffer stored in ``openBuffers[dictionaryID]``, evaluates it
    with ``FinalParser`` and responds to ``client_address`` with JSON of the
    form ``{"problems": [[line, msg], ...], "warnings": [[line, msg], ...]}``.
    Indentation errors produce a dedicated ``{"message", "line_number"}``
    response; any other failure produces empty problem/warning lists.  In
    every path ``host.validationRunning`` is reset to ``False``.
    """
    try:
        start_time = time.time() * 1000
        log_to_file("START OF VALIDATION: " + str(start_time) +
                    ", Number of iterations: " + str(number_of_iterations))

        buffer = openBuffers[dictionaryID]

        problems_list = []
        warnings_list = []

        # Decode the data
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)

        #for token in lexer:
        #    print(token.value)

        # A fresh, un-fed lexer is handed to the parser (the one above was
        # only used by the commented-out token dump).
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)
        #log_time("AFTER PARSE DATA: ", time.time() * 1000, start_time)

        # del_parts holds (start, end) ranges removed as invalid syntax.
        tree, del_parts = utils.traverse_ast_test(res)
        #log_time("AFTER TRAVERSE AST: ", time.time() * 1000, start_time)

        ast_tree = utils.parse_with_ast(res)
        #log_time("AFTER PARSE WITH AST: ", time.time() * 1000, start_time)

        parser = FinalParser(number_of_iterations)
        parser.eval_in_root(ast_tree)
        # Expand the removed ranges into individual line numbers.
        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0], delPart[1] + 1):
                del_lines.append(i)

        #log_time("AFTER EVAL IN ROOT: ", time.time() * 1000, start_time)

        #processing syntax problems
        for line in del_lines:
            p = []
            p.append(line)
            p.append('Invalid syntax.')
            problems_list.append(p)

        # Add empty lines, then de-duplicate and sort so the line-number
        # translation below works on a canonical list.
        del_lines += res.emptyLinesNums
        temp = {line for line in del_lines}
        del_lines = [line for line in temp]
        del_lines.sort()

        list_of_used_lines = []

        #Problems
        for problem in parser.problems:
            # Translate the line number back to the original buffer once;
            # the 'processed' marker prevents double translation.
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno = utils.getOriginLineNum(
                    problem.node.lineno, del_lines)
                problem.node.processed = 1

            # Report at most one problem per line.
            if not (problem.node.lineno in list_of_used_lines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                problems_list.append(b)
                list_of_used_lines.append(problem.node.lineno)

        #Warnings
        for warning in parser.warnings:
            if not hasattr(warning.node, 'processed'):
                warning.node.lineno = utils.getOriginLineNum(
                    warning.node.lineno, del_lines)
                warning.node.processed = 1
            w = []
            w.append(warning.node.lineno)
            w.append(str(warning))
            warnings_list.append(w)

        log_to_file("Problems: " + str(problems_list))
        log_to_file("Warnings: " + str(warnings_list))
        log_to_file("Validation completed...")

        problems = json.dumps({
            'problems': problems_list,
            'warnings': warnings_list
        })
        #print("DUMPED THING: "+str(problems))
        host.respond(bytes(problems, "UTF-8"), client_address)

        host.validationRunning = False
        log_to_file('----------------------------')

    except IndentationError as error:
        log_to_file("Indentation error in parsing.")
        traceback.print_exc()

        indent_error = json.dumps({
            'message': error.msg,
            'line_number': error.lineno
        })
        host.respond(bytes(indent_error, "UTF-8"), client_address)

        host.validationRunning = False
    except python_parser.RobustParserError as error:
        # The parser reported the offending line itself: forward it.
        log_to_file("Error in parsing: returning correct line number.")

        b = []
        b.append(error.data.lineno)
        b.append("invalid syntax")
        problems_list.append(b)

        problems = json.dumps({
            'problems': problems_list,
            'warnings': warnings_list
        })
        host.respond(bytes(problems, "UTF-8"), client_address)
        host.validationRunning = False
    except Exception as error:
        # Any other failure: log it and answer with empty result lists.
        log_to_file("Error in parsing: ")
        traceback.print_exc()
        #connection.sendall(bytes("endOfValidation: "+error, "utf-8"))
        #host.respond(bytes("endOfValidation", "utf-8"), client_address)
        error_problems_response = json.dumps({'problems': [], 'warnings': []})
        host.respond(bytes(error_problems_response, "UTF-8"), client_address)
        host.validationRunning = False
Code example #11
0
File: daemon.py  Project: pynfer/pynfer
def get_auto_completion(host, client_address, buffer_used, variable_line, line_number):
    '''
    This method is called when user presses '.' symbol.

    We want to find all possible attributes for given symbol on given buffer
    at given line.  The current line is replaced by an assignment to the
    sentinel name ``target_for_completion``, the buffer is truncated at that
    line, parsed and evaluated, and the resulting attribute list is sent to
    the client as JSON (an empty list if anything goes wrong).

    NOTE(review): ``buffer_used`` is mutated in place — confirm callers do
    not rely on its original content afterwards.
    '''
    try:
        #Get leading spaces so the indent matches current level
        variable_line = variable_line.replace('\t', '    ')
        leading_spaces = len(variable_line) - len(variable_line.lstrip())

        # Get only relevant part of line
        line_modified = utils.getObjectStringFromLine(variable_line)

        # Replace given line with string which will be resolved - much easier
        # for our solution
        resolving_name = 'target_for_completion'
        buffer = buffer_used
        buffer[line_number - 1] = variable_line[0:leading_spaces] + resolving_name + ' = ' + line_modified
        buffer = buffer[0:line_number]

        # Parse modified buffer and eval.  (Bug fix: an extra lexer used to
        # be created, fed the data, and discarded; parse_data gets its own
        # fresh lexer as in the rest of this file.)
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data, lexer)

        tree, del_parts = utils.traverse_ast_test(res)
        ast_tree = utils.parse_with_ast(res)

        # Collect the line numbers removed as invalid syntax or empty lines.
        del_lines = []
        for delPart in del_parts:
            del_lines.extend(range(delPart[0], delPart[1] + 1))
        del_lines += res.emptyLinesNums
        del_lines = sorted(set(del_lines))
        # (Dropped the unused ``current_line_number`` computation.)

        parser = FinalParser(1)
        parser.eval_in_root(ast_tree)

        #Remove inf_ attribtues since those are used for internal purposes
        list_of_all = parser.get_all_possible_attr(resolving_name)
        reduced_list = [item for item in list_of_all
                        if not item[0].startswith('inf_')]

        # Respond to the client.
        response_completion = json.dumps({'options' : reduced_list})
        host.respond(bytes(response_completion, 'UTF-8'), client_address)
    except Exception:
        traceback.print_exc()
        # Send an empty list if any error occurred
        response_completion_error = json.dumps({'options' : []})
        host.respond(bytes(response_completion_error, 'UTF-8'), client_address)
Code example #12
0
File: daemon.py  Project: pynfer/pynfer
def parse_and_validate(host, dictionaryID, client_address, number_of_iterations):
    """
    Main method which evaluates whole code and sends respond with errors and warnings.

    Parses ``openBuffers[dictionaryID]``, evaluates it with ``FinalParser``
    and responds to ``client_address`` with JSON
    ``{"problems": [[line, msg], ...], "warnings": [[line, msg], ...]}``.
    Indentation errors get a ``{"message", "line_number"}`` response; any
    other failure gets empty problem/warning lists.  In every path
    ``host.validationRunning`` is reset to ``False``.
    """
    try:
        start_time = time.time() * 1000
        log_to_file("START OF VALIDATION: "+str(start_time)+", Number of iterations: "+str(number_of_iterations))

        buffer = openBuffers[dictionaryID]

        problems_list = []
        warnings_list = []

        # Decode the data
        data = buffer_to_string(buffer)
        lexer = python_lexer.PythonLexer()
        lexer.input(data)

        #for token in lexer:
        #    print(token.value)

        # A fresh, un-fed lexer is handed to the parser (the one above was
        # only used by the commented-out token dump).
        lexer = python_lexer.PythonLexer()
        res = python_parser.parse_data(data,lexer)
        #log_time("AFTER PARSE DATA: ", time.time() * 1000, start_time)

        # del_parts holds (start, end) ranges removed as invalid syntax.
        tree, del_parts = utils.traverse_ast_test(res)
        #log_time("AFTER TRAVERSE AST: ", time.time() * 1000, start_time)

        ast_tree = utils.parse_with_ast(res)
        #log_time("AFTER PARSE WITH AST: ", time.time() * 1000, start_time)

        parser=FinalParser(number_of_iterations)
        parser.eval_in_root(ast_tree)
        # Expand the removed ranges into individual line numbers.
        del_lines = []
        for delPart in del_parts:
            for i in range(delPart[0],delPart[1]+1):
                del_lines.append(i)

        #log_time("AFTER EVAL IN ROOT: ", time.time() * 1000, start_time)

        #processing syntax problems
        for line in del_lines:
            p = []
            p.append(line)
            p.append('Invalid syntax.')
            problems_list.append(p)

        # Add empty lines, then de-duplicate and sort so the line-number
        # translation below works on a canonical list.
        del_lines+=res.emptyLinesNums
        temp={line for line in del_lines}
        del_lines = [line for line in temp]
        del_lines.sort()

        list_of_used_lines = []

        #Problems
        for problem in parser.problems:
            # Translate the line number back to the original buffer once;
            # the 'processed' marker prevents double translation.
            if not hasattr(problem.node, 'processed'):
                problem.node.lineno=utils.getOriginLineNum(problem.node.lineno,del_lines)
                problem.node.processed=1

            # Report at most one problem per line.
            if not (problem.node.lineno in list_of_used_lines):
                b = []
                b.append(problem.node.lineno)
                b.append(str(problem))
                problems_list.append(b)
                list_of_used_lines.append(problem.node.lineno)

        #Warnings
        for warning in parser.warnings:
            if not hasattr(warning.node, 'processed'):
                warning.node.lineno=utils.getOriginLineNum(warning.node.lineno,del_lines)
                warning.node.processed=1
            w = []
            w.append(warning.node.lineno)
            w.append(str(warning))
            warnings_list.append(w)

        log_to_file("Problems: "+str(problems_list))
        log_to_file("Warnings: "+str(warnings_list))
        log_to_file("Validation completed...")

        problems = json.dumps({'problems' : problems_list, 'warnings' : warnings_list})
        #print("DUMPED THING: "+str(problems))
        host.respond(bytes(problems, "UTF-8"), client_address)


        host.validationRunning = False
        log_to_file('----------------------------')

    except IndentationError as error:
        log_to_file("Indentation error in parsing.")
        traceback.print_exc()

        indent_error = json.dumps({'message' : error.msg, 'line_number' : error.lineno})
        host.respond(bytes(indent_error,"UTF-8"), client_address)

        host.validationRunning = False
    except python_parser.RobustParserError as error:
        # The parser reported the offending line itself: forward it.
        log_to_file("Error in parsing: returning correct line number.")

        b = []
        b.append(error.data.lineno)
        b.append("invalid syntax")
        problems_list.append(b)

        problems = json.dumps({'problems' : problems_list, 'warnings' : warnings_list})
        host.respond(bytes(problems, "UTF-8"), client_address)
        host.validationRunning = False
    except Exception as error:
        # Any other failure: log it and answer with empty result lists.
        log_to_file("Error in parsing: ")
        traceback.print_exc()
        #connection.sendall(bytes("endOfValidation: "+error, "utf-8"))
        #host.respond(bytes("endOfValidation", "utf-8"), client_address)
        error_problems_response = json.dumps({'problems' : [], 'warnings' : []})
        host.respond(bytes(error_problems_response, "UTF-8"), client_address)
        host.validationRunning = False