Example #1
    def __init__(self,
                 source_class="Playground",
                 source_method="DeliveryDate",
                 argument_name="b",
                 main_file="playground.java"):
        """create a removeflagargument refactor 

        Args:
            source_class (str): class name contaminated by code smell.
            source_mathod (str): method name contaminated.
            argument_name (str): boolean argument in method.
            main_file (str): path of main file containing source class.
        """

        self.source_class = source_class
        self.source_method = source_method
        self.argument_name = argument_name
        self.main_file = main_file

        self.stream = FileStream(self.main_file,
                                 encoding='utf8',
                                 errors='ignore')
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.parser.getTokenStream()
        self.parse_tree = self.parser.compilationUnit()
        self.my_listener = RemoveFlagArgumentListener(
            common_token_stream=self.token_stream,
            source_class=self.source_class,
            source_method=self.source_method,
            argument_name=self.argument_name)
Example #2
def main(directory_path, package_name, source_class, field_name):
    for root, dirs, files in os.walk(directory_path):
        for file in files:
            if file.endswith('.java'):
                stream = FileStream(os.path.join(root, file),
                                    encoding='utf8',
                                    errors='ignore')
                lexer = JavaLexer(stream)
                token_stream = CommonTokenStream(lexer)
                parser = JavaParserLabeled(token_stream)
                ef_listener = EncapsulateFiledRefactoringListener(
                    token_stream, package_name, source_class, field_name)
                tree = parser.compilationUnit()
                walker = ParseTreeWalker()
                walker.walk(t=tree, listener=ef_listener)

                ip_listener = InstancePropagationEncapsulateFieldListener(
                    ef_listener.token_stream_rewriter, package_name,
                    source_class, field_name)
                walker.walk(t=tree, listener=ip_listener)

                refactored = open(os.path.join(root, file), 'w', newline='')
                refactored.write(
                    ip_listener.token_stream_rewriter.getDefaultText())
                refactored.close()

    return True
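
# Usage sketch (not from the source): the path, package, class, and field names below are
# illustrative placeholders for the directory-walking encapsulate-field helper above.
main(directory_path="path/to/java/project",
     package_name="org.example",
     source_class="SomeClass",
     field_name="someField")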
Example #3
def main():
    # folder_path = "..\\benchmark_projects\\JSON\\src\\main\\java\\org\\json"
    folder_path = "../../tests/replace_constructor_with_factory_function_tests"
    path = os.listdir(folder_path)
    # target_class = "CDL"
    target_class = "Employee"

    for file in path:
        if file.endswith('.java') and not file.endswith('_refactored.java'):
            each = os.path.join(folder_path, file)
            stream = FileStream(str(each))
            lexer = JavaLexer(stream)
            tokens = CommonTokenStream(lexer)
            parser = JavaParserLabeled(tokens)
            tree = parser.compilationUnit()
            new_file = open(os.path.join(folder_path,
                                         file + "_refactored.java"),
                            mode='w',
                            newline='')
            listener = ReplaceConstructorWithFactoryFunctionRefactoringListener(
                common_token_stream=tokens, target_class=target_class)
            walker = ParseTreeWalker()
            walker.walk(listener=listener, t=tree)
            new_code = str(listener.codeRewrite.getDefaultText())
            new_file.write(new_code)
            new_file.close()
Example #4
 def __init__(self,
              udb_path,
              file_path,
              source_class,
              new_class,
              moved_fields,
              moved_methods,
              new_file_path=None):
     self.file_path = file_path
     self.udb_path = udb_path
     self.new_file_path = new_file_path
     self.source_class = source_class
     self.new_class = new_class
     self.moved_fields = moved_fields
     self.moved_methods = moved_methods
     self.stream = FileStream(self.file_path,
                              encoding="utf-8",
                              errors='ignore')
     self.lexer = JavaLexer(self.stream)
     self.token_stream = CommonTokenStream(self.lexer)
     self.parser = JavaParserLabeled(self.token_stream)
     self.tree = self.parser.compilationUnit()
     self.walker = ParseTreeWalker()
     self.method_usage_map = {}
     self.pass_this = False
     self.TAB = "\t"
     self.object_name = ""
Example #5
def main(args):
    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # Step 2: Create an instance of the Java lexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the Java parser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    # my_listener = EncapsulateFiledRefactoringListener(common_token_stream=token_stream, class_identifier='A')
    my_listener = SingletonRefactoringListener(
        common_token_stream=token_stream, class_identifier='GeneralPurposeBit')
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    newpath = "Refactored" + args.file
    if not os.path.exists(os.path.dirname(newpath)):
        try:
            os.makedirs(os.path.dirname(newpath))
        except OSError as exc:  # Guard against race condition
            pass
    with open(newpath, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
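
# Hypothetical driver (not shown in the source): main(args) above only reads args.file,
# so a minimal argument parser might look like this.
import argparse

if __name__ == '__main__':
    argparser = argparse.ArgumentParser(
        description='Apply the Singleton refactoring to a Java source file')
    argparser.add_argument('--file', required=True,
                           help='Path to the input Java source file')
    main(argparser.parse_args())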
Example #6
 def __init__(self, file_path, target_class, target_method,
              target_parameters):
     self.file_path = file_path
     self.new_file_path = file_path
     self.target_class = target_class
     self.target_method = target_method
     self.target_parameters = target_parameters
     self.stream = FileStream(self.file_path,
                              encoding="utf8",
                              errors='ignore')
     self.lexer = JavaLexer(self.stream)
     self.token_stream = CommonTokenStream(self.lexer)
     self.parser = JavaParserLabeled(self.token_stream)
     self.tree = self.parser.compilationUnit()
     self.walker = ParseTreeWalker()
Example #7
class RemoveFlagArgument:
    """Refactoring API that can be used to to do remove flag argument 

    """
    def __init__(self,
                 source_class="Playground",
                 source_method="DeliveryDate",
                 argument_name="b",
                 main_file="playground.java"):
        """create a removeflagargument refactor 

        Args:
            source_class (str): class name contaminated by code smell.
            source_mathod (str): method name contaminated.
            argument_name (str): boolean argument in method.
            main_file (str): path of main file containing source class.
        """

        self.source_class = source_class
        self.source_method = source_method
        self.argument_name = argument_name
        self.main_file = main_file

        self.stream = FileStream(self.main_file,
                                 encoding='utf8',
                                 errors='ignore')
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.parser.getTokenStream()
        self.parse_tree = self.parser.compilationUnit()
        self.my_listener = RemoveFlagArgumentListener(
            common_token_stream=self.token_stream,
            source_class=self.source_class,
            source_method=self.source_method,
            argument_name=self.argument_name)

    def do_refactor(self):
        """removes flag argument logic and replace it by two method call of the new method generated from extracted
            login in if else block
        """
        walker = ParseTreeWalker()
        walker.walk(t=self.parse_tree, listener=self.my_listener)

        # self.my_listener.body_1

        with open(self.main_file, 'w') as f:
            f.write(self.my_listener.token_stream_rewriter.getDefaultText())
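
# Usage sketch (illustrative values; relies on the constructor defaults shown above):
refactoring = RemoveFlagArgument(
    source_class="Playground",
    source_method="DeliveryDate",
    argument_name="b",
    main_file="playground.java")
refactoring.do_refactor()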
Example #8
class ReplaceParameterWithQueryAPI:
    def __init__(self, file_path, target_class, target_method,
                 target_parameters):
        self.file_path = file_path
        self.new_file_path = file_path
        self.target_class = target_class
        self.target_method = target_method
        self.target_parameters = target_parameters
        self.stream = FileStream(self.file_path,
                                 encoding="utf8",
                                 errors='ignore')
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.tree = self.parser.compilationUnit()
        self.walker = ParseTreeWalker()

    def do_refactor(self):
        listener = ReplaceParameterWithQueryRefactoringListener(
            common_token_stream=self.token_stream,
            target_class=self.target_class,
            target_method=self.target_method,
            target_parameters=self.target_parameters)
        self.walker.walk(listener=listener, t=self.tree)

        print(listener.add_to_top_of_target_method)
        print(listener.token_stream_rewriter.getDefaultText())

        print(type(self.new_file_path))

        with open(self.new_file_path, mode="w", newline="") as f:
            f.write(listener.token_stream_rewriter.getDefaultText())
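
# Usage sketch (illustrative values, not from the source):
api = ReplaceParameterWithQueryAPI(
    file_path="path/to/SomeClass.java",
    target_class="SomeClass",
    target_method="someMethod",
    target_parameters=["param1", "param2"])
api.do_refactor()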
Example #9
def main():
    Path = "../../tests/rename_tests/benchmark"
    Package_name = "org.json"
    class_identifier = "CDL"
    new_class_name = "test"

    FolderPath = os.listdir(Path)
    testsPath = os.listdir(Path + "/refactoredFiles/")

    # delete last refactored files
    for t in testsPath:
        os.remove(os.path.join(Path + "/refactoredFiles/", t))

    for File in FolderPath:
        # We have all of the java files in this folder now
        if File.endswith('.java'):
            EachFilePath = Path + "/" + File
            print(" ****************" + " in file : " + File + " ****************")
            EachFile = FileStream(str(EachFilePath))
            FileName = File.split(".")[0]
            Refactored = open(Path + "/refactoredFiles/" + FileName + "_Refactored.java", 'w', newline='')

            Lexer = JavaLexer(EachFile)

            TokenStream = CommonTokenStream(Lexer)

            Parser = JavaParserLabeled(TokenStream)

            Tree = Parser.compilationUnit()

            ListenerForReRenameClass =\
                RenameClassRefactoringListener(TokenStream, Package_name, class_identifier, new_class_name)

            Walker = ParseTreeWalker()

            Walker.walk(ListenerForReRenameClass, Tree)

            Refactored.write(ListenerForReRenameClass.token_stream_rewriter.getDefaultText())
            Refactored.close()

    print("changing public class files name... ")
    for i in range(len(old_names)):
        os.rename(Path + "/refactoredFiles/" + old_names[i] + "_Refactored.java",
                  Path + "/refactoredFiles/" + new_names[i] + "_Refactored.java")

    print(" %%%%%%%%%%%%%" + " all files finished " + "****************")
Example #10
    def add_implement_statement_to_class(self):
        stream = FileStream(self.class_path, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        listener = AddingImplementStatementToClass(
            common_token_stream=token_stream,
            class_name=os.path.splitext(os.path.basename(self.class_path))[0],
            interface_package=self.interface_info['package'],
            interface_name=self.interface_info['name']
        )
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=listener)

        with open(self.class_path, encoding='utf8', errors='ignore', mode='w', newline='') as f:
            f.write(listener.token_stream_rewriter.getDefaultText())
Example #11
def main(udb_path, source_class, field_name, *args, **kwargs):
    """

    Main API for make field non-static

    """

    main_file = None
    db = und.open(udb_path)
    classes = db.ents("Class")
    for cls in classes:
        if cls.simplename() == source_class:
            if cls.parent() is not None:
                temp_file = str(cls.parent().longname(True))
                if os.path.isfile(temp_file):
                    main_file = temp_file
                    break

    if main_file is None:
        db.close()
        return False

    db.close()
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeFieldNonStaticRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        field_name=field_name)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file,
              mode='w',
              encoding='utf8',
              errors='ignore',
              newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    return True
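
# Usage sketch (illustrative; requires an existing Understand .udb database for the project):
main(udb_path="path/to/project.udb",
     source_class="SomeClass",
     field_name="someStaticField")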
Example #12
    def propagate_fields(self, usages):
        for usage in usages:
            file_path = usage.pop('file_path')
            stream = FileStream(file_path, encoding='utf-8', errors='ignore')
            lexer = JavaLexer(stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            parse_tree = parser.compilationUnit()
            my_listener = PropagateFieldUsageListener(
                common_token_stream=token_stream,
                object_name=self.object_name,
                **usage)
            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)

            # print(my_listener.token_stream_rewriter.getDefaultText())
            with open(file_path, mode='w', encoding='utf-8',
                      errors='ignore') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
            self.reformat(file_path)
Example #13
def main(args):
    # Step 1: Load input source into stream
    begin_time = time()
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # input_stream = StdinStream()
    print('Input stream:')
    print(stream)

    # Step 2: Create an instance of the Java lexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the Java parser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    my_listener = VisitorPatternRefactoringListener(
        common_token_stream=token_stream,
        SuperClass_identifier='SC',
        SubClass_identifier=['CC1', 'CC2', 'CC3']
    )
    #  SuperClass_identifier='ComputerPart',
    #  SubClass_identifier=['Keyboard', 'Monitor', 'Mouse', 'Computer'])
    #  SuperClass_identifier='Shape',
    #  SubClass_identifier=['Polygon', 'Rectangle','Arrow'])
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    print('Compiler result:')
    print(my_listener.token_stream_rewriter.getDefaultText())

    with open('../../tests/visitor1/VisitorExample0.refactored.java', mode='w', newline='') as f:
        #   with open('VisitorExample1.refactored.java', mode='w', newline='') as f:
        #    with open('VisitorExample2.refactored.java', mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    end_time = time()
    print("time execution : ", end_time - begin_time)
Example #14
def main():
    path_ = "../../tests/rename_tests/benchmark"
    package_name_ = "org.json"
    class_identifier_ = "HTTP"
    field_identifier_ = "CRLF"
    field_new_name_ = "test"

    folder_path = os.listdir(path_)
    tests_path = os.listdir(path_ + "/refactoredFiles/")

    # delete last refactored files
    for t in tests_path:
        os.remove(os.path.join(path_ + "/refactoredFiles/", t))

    for file_ in folder_path:
        # We have all java files in this folder now
        if file_.endswith('.java'):
            file_path = path_ + "/" + file_
            file_stream = FileStream(str(file_path))
            file_name = file_.split(".")[0]
            refactored = open(path_ + "/refactoredFiles/" + file_name +
                              "_Refactored.java",
                              'w',
                              newline='')

            lexer = JavaLexer(file_stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            tree = parser.compilationUnit()
            rename_field_refactoring_listener = RenameFieldRefactoringListener(
                token_stream, package_name_, class_identifier_,
                field_identifier_, field_new_name_)

            walker = ParseTreeWalker()
            walker.walk(rename_field_refactoring_listener, tree)
            refactored.write(rename_field_refactoring_listener.
                             token_stream_rewriter.getDefaultText())
            refactored.close()

    print(" %%%%%%%%%%%%%" + " all files finished " + "****************")
Example #15
def rename_method(java_file_path,
                  scope_class_name,
                  target_method_name,
                  new_name,
                  reference=None):
    """Main Entry Point to the Listener and Tree Walker

    Args:
        java_file_path(str): Address path to the test/source file

        scope_class_name(str): Name of the class in which the refactoring has to be done

        target_method_name(str): Name of the method in which the refactoring has to be done

        new_name(str): The new name of the refactored method

        reference(str): Keeping track for all of the method references in the project scope

    Returns:
        No Returns
   """
    stream = FileStream(java_file_path)
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)
    tree = parser.compilationUnit()
    listener = RenameMethodListener(java_file_path=java_file_path,
                                    common_token_stream=tokens,
                                    scope_class_name=scope_class_name,
                                    target_method_name=target_method_name,
                                    new_name=new_name,
                                    reference=reference)
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    if listener.changed:
        # print(java_file_path)
        with open(file=java_file_path, mode='w') as new_file:
            new_file.write(listener.token_stream_rewriter.getDefaultText().replace(
                '\r', ''))
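
# Usage sketch (illustrative file and identifier names, not from the source):
rename_method(java_file_path="path/to/SomeClass.java",
              scope_class_name="SomeClass",
              target_method_name="oldMethodName",
              new_name="newMethodName")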
Example #16
def main(args):
    # Step 1: Load input source into stream
    begin_time = time()
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # input_stream = StdinStream()
    print('Input stream:')
    print(stream)

    # Step 2: Create an instance of the Java lexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the Java parser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    my_listener = StrategyPatternRefactoringListener(
        common_token_stream=token_stream, method_identifier='execute')
    #                                                     method_identifier='read')
    #                                                     method_identifier='write')

    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    print('Compiler result:')
    print(my_listener.token_stream_rewriter.getDefaultText())

    with open('../../tests/strategy1/StrategyExample0.refactored.java',
              mode='w',
              newline='') as f:
        #    with open('StrategyExample1.refactored.java', mode='w', newline='') as f:
        #    with open('StrategyExample2.refactored.java', mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    end_time = time()
    print("execute time : ", end_time - begin_time)
Example #17
def main(args):
    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # input_stream = StdinStream()

    # Step 2: Create an instance of the Java lexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the Java parser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    my_listener = FactoryMethodRefactoringListener(common_token_stream=token_stream,
                                                   creator_identifier='FactoryMethod',
                                                   products_identifier=['JpegReader', 'GifReader'])
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open('../../tests/factory1/FactoryExample.refactored.java', mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #18
def main(args):
    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # input_stream = StdinStream()

    # Step 2: Create an instance of the Java lexer
    # lexer = Java9_v2Lexer(stream)  # Deprecated
    lexer = JavaLexer(stream)  # Current lexer

    # Step 3: Convert the input source into a list of tokens
    common_token_stream = CommonTokenStream(lexer)

    # Step 4: Create an instance of the Java parser
    # parser = Java9_v2Parser(common_token_stream)  # Deprecated
    parser = JavaParserLabeled(common_token_stream)  # Current parser
    # parser.getTokenStream()

    # Step 5: Create parse tree
    # 5.1. Python backend --> Low speed
    parse_tree = parser.compilationUnit()

    # 5.2. C++ backend --> high speed
    # parse_tree = sa_java9_v2.parse(stream, 'compilationUnit', None) # Deprecated

    # Step 6: Create an instance of the refactoring listener
    my_listener = EncapsulateFiledRefactoringListener(
        common_token_stream=common_token_stream, field_identifier='f')
    # my_listener = ExtractClassRefactoringListener(common_token_stream=token_stream, class_identifier='Worker')

    # return
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open('../tests/encapsulate_field_tests/input.refactored.java',
              mode='w',
              newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #19
def main(class_path):
    """

    Args:

        class_path (str): The java file path containing the public class

    """

    # Precondition 1: The interface should not already exist.
    interface_path = os.path.join(
        os.path.dirname(class_path),
        f'I{os.path.splitext(os.path.basename(class_path))[0]}.java'
    )
    if os.path.exists(interface_path):
        return False

    stream = FileStream(class_path, encoding='utf-8', errors='ignore')
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)
    tree = parser.compilationUnit()

    listener = InterfaceInfoListener()

    walker = ParseTreeWalker()
    walker.walk(listener=listener, t=tree)

    interface_info_ = listener.get_interface_info()
    interface_info_['name'] = 'I' + interface_info_['name']
    interface_info_['path'] = os.path.dirname(class_path)

    ic = InterfaceCreator(interface_info_, class_path)
    ic.add_implement_statement_to_class()
    ic.save()
    return True
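
# Usage sketch (illustrative path): main() returns False when the interface already exists.
if main(class_path="path/to/SomeClass.java"):
    print("Interface extracted next to the class file.")
else:
    print("Interface already exists; nothing to do.")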
Example #20
def extract_method(conf):
    stream = FileStream(conf['target_file'], encoding="utf-8", errors='ignore')
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)
    tree = parser.compilationUnit()
    listener = ExtractMethodRefactoring(
        list(map(int, list(conf['lines'].keys()))))
    walker = ParseTreeWalker()
    walker.walk(listener=listener, t=tree)

    if not listener.is_result_valid:
        raise Exception('Invalid selection: the chosen lines cannot be extracted into a method.')

    if conf['new_method_name'] in listener.methods_name:
        raise Exception('New method name already exists.')

    output = []
    file1 = open(conf['target_file'], 'r', encoding="utf-8", errors='ignore')
    lines = file1.readlines()
    line_num = 1
    # func_added = False
    func = []
    print('extracting following lines:')
    for line in lines:
        if line_num in listener.lines:
            print(line, end='')
            if line_num == listener.last_line:
                output.append(
                    get_tabs(line) + listener.get_write_variable() +
                    conf['new_method_name'] + listener.get_args(False) + '\n')
            func.append(line)
            if conf['lines'][line_num]:
                output.append(line)
        elif line_num == listener.method_stop_line:
            output.append(line)
            output.append(
                get_tabs(line) + 'private ' +
                ('static ' if listener.is_target_method_static else '') +
                (listener.return_variable_type if listener.
                 return_variable_type else 'void') + ' ' +
                conf['new_method_name'] + listener.get_args(True) +
                ((' throws ' +
                  listener.exception_thrown_in_target_method) if listener.
                 exception_thrown_in_target_method is not None else '') + '\n')
            output.append(get_tabs(line) + '{\n')
            for item in listener.pre_variables.keys():
                var = listener.pre_variables[item]
                if 'write' in var and not var['write']:
                    output.append(
                        get_tabs(line) + '\t' + var['type'] + ' ' + item +
                        ';\n')
            output = output + func
            if listener.return_variable is not None:
                output.append(
                    get_tabs(line) + '\treturn ' + listener.return_variable +
                    ';\n')
            output.append(get_tabs(line) + '}\n')
        else:
            output.append(line)
        line_num += 1
    file1.close()

    file2 = open(conf['output_file'], 'w', encoding="utf-8", errors='ignore')
    for item in output:
        file2.write(item)
    file2.close()
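
# Hedged example configuration (illustrative values): 'lines' maps the 1-based line numbers
# to extract to a flag that appears to control whether the line is also kept at the call site.
example_conf = {
    'target_file': 'path/to/SomeClass.java',
    'output_file': 'path/to/SomeClass.refactored.java',
    'new_method_name': 'extractedHelper',
    'lines': {42: False, 43: False, 44: False},
}
extract_method(example_conf)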
Example #21
def main(args):
    input_directory = args.directory
    input_java_files = [file for file in os.listdir(input_directory) if '.java' in file]
    refactoring_id = args.refactor
    source_class_data = None
    target_class = None
    target_class_data = None
    is_complete = False
    print("Process started")
    for i in range(2):
        for file in input_java_files:

            # Step 1: Load input source into stream
            if i == 0:
                stream = FileStream(input_directory + '/' + file, encoding='utf8', errors='ignore')
            else:
                stream = FileStream('benchmark_projects/refactored/' + file, encoding='utf8', errors='ignore')
            # input_stream = StdinStream()

            # Step 2: Create an instance of the Java lexer
            lexer = JavaLexer(stream)
            # Step 3: Convert the input source into a list of tokens
            token_stream = CommonTokenStream(lexer)
            # Step 4: Create an instance of the Java parser
            parser = JavaParserLabeled(token_stream)
            tree = parser.compilationUnit()
            # Step 6: Create an instance of the refactoring listener
            if refactoring_id == 'c':
                my_listener = CollapseHierarchyRefactoringListener(
                    common_token_stream=token_stream, source_class='JSONStringer',
                    target_class=target_class, source_class_data=source_class_data,
                    target_class_data=target_class_data, is_complete=is_complete
                )

                walker = ParseTreeWalker()
                walker.walk(t=tree, listener=my_listener)
                target_class = my_listener.target_class
                source_class_data = my_listener.source_class_data
                target_class_data = my_listener.target_class_data
                is_complete = my_listener.is_complete
            elif refactoring_id == 'i':
                my_listener = InlineClassRefactoringListener(
                    common_token_stream=token_stream, source_class='HTTPTokener',
                    target_class='JSONTokener', source_class_data=source_class_data,
                    target_class_data=target_class_data, is_complete=is_complete
                )
                walker = ParseTreeWalker()
                walker.walk(t=tree, listener=my_listener)
                target_class = my_listener.target_class
                source_class_data = my_listener.source_class_data
                target_class_data = my_listener.target_class_data
                is_complete = my_listener.is_complete
            elif refactoring_id == 'ms':
                my_listener = MakeMethodStaticRefactoringListener(
                    common_token_stream=token_stream, target_class='JSONPointer',
                    target_methods=['toURIFragment']
                )
                walker = ParseTreeWalker()
                walker.walk(t=tree, listener=my_listener)
            else:
                my_listener = MakeMethodNonStaticRefactoringListener(
                    common_token_stream=token_stream, target_class='JSONPointer',
                    target_methods=['builder']
                )
                walker = ParseTreeWalker()
                walker.walk(t=tree, listener=my_listener)
            with open('benchmark_projects/refactored/' + file, mode='w+', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
            print("/\\", end='')
Example #22
def main(udb, child, parent):
    # initialize with understand
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    child_path_file = ""
    father_path_file = ""
    file_list_to_be_propagate = set()
    propagate_classes = set()
    # db = und.open(udb)
    # for cls in db.ents("class"):
    #     if cls.simplename() == child:
    #         child_path_file = cls.parent().longname()
    #         for ref in cls.refs("Coupleby"):
    #             propagate_classes.add(ref.ent().longname())
    #             file_list_to_be_propagate.add(ref.ent().parent().longname())
    #     if cls.simplename() == parent:
    #         father_path_file = cls.parent().longname()

    file_list_to_be_propagate = list(file_list_to_be_propagate)
    propagate_classes = list(propagate_classes)

    stream = FileStream(child_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener_field_text = CollapseHierarchyRefactoringGetFieldTextListener(
        common_token_stream=token_stream, child_class=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_field_text)
    field_code = my_listener_field_text.fieldcode

    my_listener_method_text = CollapseHierarchyRefactoringGetMethodTextListener(
        common_token_stream=token_stream, child_class=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_method_text)
    methods_code = my_listener_method_text.methodcode

    # Remove child class
    my_listener_remove_child_class = RemoveClassRefactoringListener(
        common_token_stream=token_stream, class_name=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_remove_child_class)
    with open(child_path_file, mode='w', newline='') as f:
        f.write(my_listener_remove_child_class.token_stream_rewriter.
                getDefaultText())
    # Refactor
    stream = FileStream(father_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener_refactor_action = CollapseHierarchyRefactoringListener(
        common_token_stream=token_stream,
        parent_class=parent,
        child_class=child,
        field_text=field_code,
        method_text=methods_code)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_refactor_action)

    with open(father_path_file, mode='w', newline='') as f:
        f.write(
            my_listener_refactor_action.token_stream_rewriter.getDefaultText())
    # Propagate
    for file in file_list_to_be_propagate:
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_propagate = PropagationCollapseHierarchyListener(
            token_stream_rewriter=token_stream,
            old_class_name=child,
            new_class_name=parent,
            propagated_class_name=propagate_classes)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)

        with open(file, mode='w', newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())
Example #23
def main():
    """
    it builds the parse tree and walk its corresponding walker so that our overridden methods run.
    """

    # udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    # udb_path=create_understand_database("C:\\Users\\asus\\Desktop\\test_project")
    # source_class = "GodClass"
    # moved_methods = ['method1', 'method3', ]
    # moved_fields = ['field1', 'field2', ]
    udb_path = "C:\\Users\\asus\\Desktop\\test_project\\test_project.udb"
    # moved_methods = ['getValue', 'rowToJSONArray', 'getVal', ]
    # moved_fields = ['number_2', 'number_1', ]

    source_class = "GodClass"
    moved_methods = ['method1', 'method3']
    moved_fields = ['field1', 'field2']
    father_path_file = "/data/Dev/JavaSample/src/GodClass.java"
    father_path_directory = "/data/Dev/JavaSample/src"
    path_to_refactor = "/data/Dev/JavaSample/src"
    new_class_file = "/data/Dev/JavaSample/src/GodSubClass.java"

    # source_class = "TaskNode"
    # moved_methods = ['getUserObject']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject\\ganttproject\\src\\main\\java\\net\\sourceforge\\ganttproject\\task\\TaskNode.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject\\ganttproject\\src\\main\\java\\net\\sourceforge\\ganttproject\\task"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject\\ganttproject\\src\\main\\java\\net\\sourceforge\\ganttproject\\task\\TaskNodeextracted.java"

    # source_class = "SecuritySupport"
    # moved_methods = ['getSystemProperty']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\html\\dom\\SecuritySupport.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\html\\dom"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\html\\dom\\SecuritySupportextracted.java"

    # source_class = "BaseMarkupSerializer"
    # moved_methods = ['setOutputCharStream']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\xml\\serialize\\BaseMarkupSerializer.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\xml\\serialize"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\xml\\serialize\\BaseMarkupSerializerextracted.java"

    # source_class = "Piece"
    # moved_methods = ['setX']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master\\src\\game\\Piece.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master\\src\\game"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master\\src\\game\\Pieceextracted.java"

    stream = FileStream(father_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = ExtractSubClassRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        new_class=source_class + "extracted",
        moved_fields=moved_fields,
        moved_methods=moved_methods,
        output_path=father_path_directory)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(father_path_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    extractJavaFilesAndProcess(path_to_refactor, father_path_file,
                               new_class_file)
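    # Note: files_to_refactor is assumed to be a module-level collection populated by
    # extractJavaFilesAndProcess() above; it is not defined in this snippet.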

    for file in files_to_refactor:
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()

        my_listener = FindUsagesListener(common_token_stream=token_stream,
                                         source_class=source_class,
                                         new_class=source_class + "extracted",
                                         moved_fields=moved_fields,
                                         moved_methods=moved_methods,
                                         output_path=father_path_directory)

        # output_path=father_path_directory)

        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)

        tmp_aul = my_listener.aul

        with open(file, mode='w', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

        # after find usages

        try:
            stream = FileStream(file, encoding='utf8', errors='ignore')
            lexer = JavaLexer(stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            parser.getTokenStream()
            parse_tree = parser.compilationUnit()

            my_listener = PropagationListener(
                common_token_stream=token_stream,
                source_class=source_class,
                new_class=source_class + "extracted",
                moved_fields=moved_fields,
                moved_methods=moved_methods,
                output_path=father_path_directory,
                aul=tmp_aul)

            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)

            with open(file, mode='w', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
        except Exception:
            print("Could not propagate changes to file:", file)
Example #24
def main(udb_path: str, children_classes: list, method_name: str, *args,
         **kwargs):
    """


    """

    if len(children_classes) <= 1:
        logger.error("len(children_classes) should be gte 2")
        return False

    # Initialize with understand
    destination_class = ""
    fileslist_to_be_rafeactored = set()
    fileslist_to_be_propagate = set()
    propagation_classes = set()

    db = und.open(udb_path)
    try:
        method_ents = [
            db.lookup(i + "." + method_name, "method")[0]
            for i in children_classes
        ]
    except IndexError:
        # print([db.lookup(i + "." + method_name, "method") for i in children_classes])
        logger.error(
            f"Method {method_name} does not exists in all children_classes.")
        db.close()
        return False

    # Get method text
    method_text = method_ents[0].contents().strip()

    for method_ent in method_ents:
        if method_ent.contents().strip() != method_text:
            logger.error("Method content is different.")
            db.close()
            return False

        for ref in method_ent.refs("Use,Call"):
            if ref.ent().parent() is not None:
                if ref.ent().parent().simplename() in children_classes:
                    logger.error("Method has internal dependencies.")
                    db.close()
                    return False

    for mth in db.ents("Java Method"):
        for child in children_classes:
            if mth.longname().endswith(child + "." + method_name):
                fileslist_to_be_rafeactored.add(
                    mth.parent().parent().longname())
                for fth in mth.parent().refs("Extend"):
                    destination_class = fth.ent().longname()
                    fileslist_to_be_rafeactored.add(
                        fth.ent().parent().longname())
                for ref in mth.refs("Java Callby"):
                    propagation_classes.add(ref.ent().parent().longname())
                    fileslist_to_be_propagate.add(
                        ref.ent().parent().parent().longname())

    db.close()

    # print("=========================================")
    # print("fileslist_to_be_propagate :", fileslist_to_be_propagate)
    # print("propagation_classes : ", propagation_classes)
    # print("fileslist_to_be_rafeactored :", fileslist_to_be_rafeactored)
    # print("father class :", destination_class)

    fileslist_to_be_rafeactored = list(fileslist_to_be_rafeactored)
    fileslist_to_be_propagate = list(fileslist_to_be_propagate)
    propagation_class = list(propagation_classes)

    # refactored start
    for file in fileslist_to_be_rafeactored:
        try:
            stream = FileStream(file, encoding='utf-8', errors='ignore')
        except:
            continue
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_refactor = PullUpMethodRefactoringListener(
            common_token_stream=token_stream,
            destination_class=destination_class,
            children_class=children_classes,
            moved_methods=method_name,
            method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_refactor)

        with open(file, mode='w', encoding='utf-8', newline='') as f:
            f.write(
                my_listener_refactor.token_stream_rewriter.getDefaultText())
    # end refactoring

    # beginning of propagate
    for file in fileslist_to_be_propagate:
        if not os.path.exists(file):
            continue
        stream = FileStream(file, encoding='utf-8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_propagate = PropagationPullUpMethodRefactoringListener(
            token_stream_rewriter=token_stream,
            old_class_name=children_classes,
            new_class_name=destination_class,
            propagated_class_name=propagation_class)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)

        with open(file, mode='w', encoding='utf8', errors='ignore',
                  newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())
    # end of propagate

    return True
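
# Usage sketch for the pull-up-method API above (illustrative values, not from the source):
main(udb_path="path/to/project.udb",
     children_classes=["ChildA", "ChildB"],
     method_name="commonMethod")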

Example #25
if __name__ == '__main__':
    listener = ReplaceConditionalWithPolymorphism(
        "../../tests/replace_conditional_with_polymorphism/sample.java",
        "SwitchDemo", "myMethod")
    try:
        stream = FileStream(listener.file)
    except OSError:
        print("Path not found")
        raise
    classname = listener.class_name
    func = listener.method_name
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)
    tree = parser.compilationUnit()
    walker = ParseTreeWalker()
    walker.walk(listener=listener, t=tree)
    token = lexer.reset()
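    # Note: found_class and found_func are assumed to be collections populated during the
    # walk above; they are not defined in this snippet.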
    if len(found_class) > 0 and len(found_func) > 0:
        token = lexer.nextToken()
        not_switch = True
        opening = ""
        while token.type != Token.EOF:
            if not_switch:
                if token.type != lexer.SWITCH:
                    opening += token.text
            if token.type == lexer.SWITCH:
                not_switch = False
                token = lexer.nextToken()
Example #26
def main(args):
    files = get_file_dirs(args.dir)

    create_new_project_dir('JavaProjectRefactored', files)
    ref = input(" choose your refactoring :")

    for file in files:

        # Step 1: Load input source into stream

        m = re.search(r'^.*\.java$', file)
        if m is None:
            continue

        print(file)

        stream = FileStream(file, encoding='utf8', errors='ignore')
        # input_stream = StdinStream()

        # Step 2: Create an instance of the Java lexer
        lexer = JavaLexer(stream)
        # Step 3: Convert the input source into a list of tokens
        token_stream = CommonTokenStream(lexer)
        # Step 4: Create an instance of the Java parser
        parser = JavaParserLabeled(token_stream)

        parser.getTokenStream()
        # Step 5: Create parse tree
        parse_tree = parser.compilationUnit()
        # Step 6: Create an instance of the refactoring listener

        # my_listener = RenameClassRefactoringListener(common_token_stream=token_stream, class_new_name='Z',
        #                                                 class_identifier='A', package_identifier="Dummy")
        if ref == "Rename":
            print("Rename class  =>")
            my_listener = RenameClassRefactoringListener(
                common_token_stream=token_stream,
                class_new_name='Z',
                class_identifier='ReflectiveXmlRpcMetaDataHandler',
                package_identifier="org.apache.xmlrpc.metadata")
        elif ref == "Static":
            print("Make field static  =>")
            my_listener = MakeFieldStaticRefactoringListener(
                common_token_stream=token_stream,
                field_identifier='methodHelp',
                class_identifier='ReflectiveXmlRpcMetaDataHandler',
                package_identifier="org.apache.xmlrpc.metadata")
        elif ref == "Non-Static":
            print("Make field Non static  =>")
            my_listener = MakeFieldNonStaticRefactoringListener(
                common_token_stream=token_stream,
                field_identifier='log',
                class_identifier='XmlRpcErrorLogger',
                package_identifier="org.apache.xmlrpc.server")
        else:
            print("Not Valid")
            break

        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)

        if ref == "Non Static" and my_listener.canceled:
            rewrite_project(files, 'JavaProjectRefactored')
            break

        if ref == "Rename" and my_listener.in_selected_class:
            splited_dir = file.split('/')
            splited_dir[0] = 'JavaProjectRefactored'
            if os.path.exists("/".join(splited_dir)):
                os.remove("/".join(splited_dir))
            splited_dir[-1] = my_listener.class_new_name + ".java"
            with open("/".join(splited_dir), mode='w', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
        else:
            splited_dir = file.split('/')
            splited_dir[0] = 'JavaProjectRefactored'
            with open("/".join(splited_dir), mode='w', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #27
    #             self.token_stream_rewriter.replaceRange(
    #                 from_idx=ctx.start.tokenIndex+1,
    #                 to_idx=ctx.start.tokenIndex+1,
    #                 text="\n"+self.field_text+"\n"
    #             )


if __name__ == '__main__':
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    source_class = "Triangle"
    # initialize with understand
    main_file = ""
    db = und.open(udb_path)
    for cls in db.ents("class"):
        if cls.simplename() == source_class:
            main_file = cls.parent().longname()
    db.close()
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeNonFinalClassRefactoringListener(common_token_stream=token_stream,
                                                       class_name=source_class)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file, mode='w', encoding='utf8', errors='ignore', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #28
class ExtractClassAPI:
    def __init__(self,
                 udb_path,
                 file_path,
                 source_class,
                 new_class,
                 moved_fields,
                 moved_methods,
                 new_file_path=None):
        self.file_path = file_path
        self.udb_path = udb_path
        self.new_file_path = new_file_path
        self.source_class = source_class
        self.new_class = new_class
        self.moved_fields = moved_fields
        self.moved_methods = moved_methods
        self.stream = FileStream(self.file_path,
                                 encoding="utf-8",
                                 errors='ignore')
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.tree = self.parser.compilationUnit()
        self.walker = ParseTreeWalker()
        self.method_usage_map = {}
        self.pass_this = False
        self.TAB = "\t"
        self.object_name = ""

    def check_dependency_graph(self):
        listener = DependencyPreConditionListener(
            common_token_stream=self.token_stream,
            class_identifier=self.source_class)
        self.walker.walk(listener=listener, t=self.tree)
        component = sorted(self.moved_methods + self.moved_fields)
        if component in sorted(listener.connected_components):
            self.checked = True
        if len(listener.connected_components) == 0:
            self.checked = True

    def get_source_class_map(self):
        _db = und.open(self.udb_path)
        class_ents = _db.lookup(self.source_class, "Class")
        class_ent = None
        for ent in class_ents:
            if ent.parent() is not None:
                if Path(ent.parent().longname()) == Path(self.file_path):
                    class_ent = ent
                    break
        if class_ent is None:
            _db.close()
            return

        for ref in class_ent.refs("Define", "Method"):
            method_ent = ref.ent()
            self.method_usage_map[method_ent.simplename()] = set()
            for use in method_ent.refs("Setby Useby Modifyby, Call",
                                       "Variable ~Unknown, Method ~Unknown"):
                self.method_usage_map[method_ent.simplename()].add(
                    use.ent().simplename())
        _db.close()

    def propagate_fields(self, usages):
        for usage in usages:
            file_path = usage.pop('file_path')
            stream = FileStream(file_path, encoding='utf-8', errors='ignore')
            lexer = JavaLexer(stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            parse_tree = parser.compilationUnit()
            my_listener = PropagateFieldUsageListener(
                common_token_stream=token_stream,
                object_name=self.object_name,
                **usage)
            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)

            # print(my_listener.token_stream_rewriter.getDefaultText())
            with open(file_path, mode='w', encoding='utf-8',
                      errors='ignore') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
            self.reformat(file_path)

    @staticmethod
    def reformat(file_path: str):
        # formatter = os.path.abspath("../assets/formatter/google-java-format-1.10.0-all-deps.jar")
        # subprocess.call(["java", "-jar", formatter, "--replace", file_path])
        pass

    def do_refactor(self):
        listener = ExtractClassRefactoringListener(
            common_token_stream=self.token_stream,
            new_class=self.new_class,
            source_class=self.source_class,
            moved_fields=self.moved_fields,
            moved_methods=self.moved_methods,
            method_map=self.method_usage_map)
        self.object_name = listener.object_name
        self.walker.walk(listener=listener, t=self.tree)

        # Find Field and Method Usages
        _db = und.open(self.udb_path)
        field_usages = []
        for field in self.moved_fields:
            for ent in _db.lookup(f"{self.source_class}.{field}"):
                # print(ent.name(), "  [", ent.kindname(), "]", sep="", end="\n")
                for ref in ent.refs("Useby, Setby, Modifyby"):
                    if Path(ref.file().longname()) == Path(self.file_path):
                        continue
                    field_usage = {
                        'field_name': field,
                        'file_path': ref.file().longname()
                    }
                    if field_usage not in field_usages:
                        field_usages.append(field_usage)
        _db.close()
        # print(listener.token_stream_rewriter.getDefaultText())
        # print("=" * 25)
        # print(listener.code)
        stream = InputStream(listener.code)
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = NewClassPropagation(common_token_stream=token_stream,
                                          method_map=self.method_usage_map,
                                          source_class=self.source_class,
                                          moved_fields=self.moved_fields)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())

        # Write Changes
        with open(self.file_path,
                  mode='w',
                  encoding='utf-8',
                  errors='ignore',
                  newline='') as f:
            f.write(listener.token_stream_rewriter.getDefaultText())

        # Write new class
        with open(self.new_file_path,
                  mode='w',
                  encoding='utf-8',
                  errors='ignore',
                  newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

        # Propagate and reformat
        self.propagate_fields(field_usages)
        self.reformat(self.file_path)
        self.reformat(self.new_file_path)

        return True
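
# A minimal usage sketch (not part of the original example). The class name
# ExtractClassAPI and all paths and member names below are placeholders;
# adjust them to the constructor of the refactoring class in your project.
if __name__ == '__main__':
    extractor = ExtractClassAPI(
        udb_path="/path/to/project.und",        # hypothetical Understand DB
        file_path="/path/to/src/GodClass.java",  # hypothetical source file
        source_class="GodClass",
        new_class="GodClassData",
        moved_fields=["width", "height"],
        moved_methods=["getArea", "resize"],
    )
    extractor.do_refactor()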
Ejemplo n.º 29
0
    def do_refactor(self):
        listener = ExtractClassRefactoringListener(
            common_token_stream=self.token_stream,
            new_class=self.new_class,
            source_class=self.source_class,
            moved_fields=self.moved_fields,
            moved_methods=self.moved_methods,
            method_map=self.method_usage_map)
        self.object_name = listener.object_name
        self.walker.walk(listener=listener, t=self.tree)

        # Find Field and Method Usages
        _db = und.open(self.udb_path)
        field_usages = []
        for field in self.moved_fields:
            for ent in _db.lookup(f"{self.source_class}.{field}"):
                # print(ent.name(), "  [", ent.kindname(), "]", sep="", end="\n")
                for ref in ent.refs("Useby, Setby, Modifyby"):
                    if Path(ref.file().longname()) == Path(self.file_path):
                        continue
                    field_usage = {
                        'field_name': field,
                        'file_path': ref.file().longname()
                    }
                    if field_usage not in field_usages:
                        field_usages.append(field_usage)
        _db.close()
        # print(listener.token_stream_rewriter.getDefaultText())
        # print("=" * 25)
        # print(listener.code)
        stream = InputStream(listener.code)
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = NewClassPropagation(common_token_stream=token_stream,
                                          method_map=self.method_usage_map,
                                          source_class=self.source_class,
                                          moved_fields=self.moved_fields)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())

        # Write Changes
        with open(self.file_path,
                  mode='w',
                  encoding='utf-8',
                  errors='ignore',
                  newline='') as f:
            f.write(listener.token_stream_rewriter.getDefaultText())

        # Write new class
        with open(self.new_file_path,
                  mode='w',
                  encoding='utf-8',
                  errors='ignore',
                  newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

        # Propagate and reformat
        self.propagate_fields(field_usages)
        self.reformat(self.file_path)
        self.reformat(self.new_file_path)

        return True
Ejemplo n.º 30
0
def main(udb_path, source_package, source_class, method_name,
         target_classes: list, *args, **kwargs):
    """

    The main API for the push-down method refactoring operation

    """
    target_package = source_package
    source_method = method_name

    main_file = None
    source_method_entity = None
    is_static = False
    propagation_files = []
    propagation_classes = []
    propagation_lines = []
    children_classes = []
    children_files = []

    # Initialize with understand
    db = und.open(udb_path)
    methods = db.ents("Java Method")
    qualified_name = f"{source_package}.{source_class}.{source_method}"
    for mth in methods:
        if mth.longname() == qualified_name:
            source_method_entity = mth
            for child_ref in mth.parent().refs("Extendby"):
                child_ref = child_ref.ent()
                if child_ref.simplename() in target_classes:
                    children_classes.append(child_ref.simplename())
                    children_files.append(child_ref.parent().longname())
            # print("mainfile : ", mth.parent().parent().longname())
            is_static = mth.kind().check("static")
            main_file = mth.parent().parent().longname()
            for ref in mth.refs("Callby"):
                propagation_files.append(
                    ref.ent().parent().parent().longname())
                propagation_classes.append(ref.ent().parent().simplename())
                propagation_lines.append(ref.line())

    # Check pre-conditions
    if len(target_classes) != 1:
        logger.error(
            f"Expected exactly one target class, got {len(target_classes)}.")
        db.close()
        return False

    if len(children_classes) != 1:
        logger.error(
            f"Expected exactly one child class, got {len(children_classes)}.")
        db.close()
        return False

    if len(children_files) != 1:
        logger.error(
            f"Expected exactly one child file, got {len(children_files)}.")
        db.close()
        return False

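    # Reject the push down if an identical method (same name, type, kind, and
    # parameter list) already exists in a target class.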
    for mth in methods:
        if (mth.simplename() == source_method
                and mth.parent().simplename() in target_classes
                and mth.type() == source_method_entity.type()
                and mth.kind() == source_method_entity.kind()
                and mth.parameters() == source_method_entity.parameters()):
            logger.error("Duplicated method: a method with the same signature "
                         "already exists in the target class.")
            db.close()
            return False

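    # The method may only use or call public entities; any non-public
    # dependency counts as an internal dependency and blocks the push down.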
    for ref in source_method_entity.refs("use, call"):
        ref_ent = ref.ent()
        is_public = ref_ent.kind().check("public")
        if not is_public:
            logger.error("Has internal dependencies.")
            db.close()
            return False

    # Get the source text of the method to be pushed down
    method_text = source_method_entity.contents()

    db.close()

    # Delete source method
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = DeleteSourceListener(common_token_stream=token_stream,
                                       source_method=source_method)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    # print(my_listener.token_stream_rewriter.getDefaultText())
    with open(main_file, mode='w', encoding='utf-8', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    # Do the push down
    for child_file, child_class in zip(children_files, children_classes):
        stream = FileStream(child_file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = PushDownMethodRefactoringListener(
            common_token_stream=token_stream,
            source_class=child_class,
            source_method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())
        with open(child_file, mode='w', encoding='utf8', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

    # Propagation
    for file, _class, line in zip(propagation_files, propagation_classes,
                                  propagation_lines):
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        if is_static:
            my_listener = PropagationStaticListener(
                common_token_stream=token_stream,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=_class,
                method_name=source_method,
                ref_line=line,
                target_package=target_package)
        else:
            my_listener = PropagationNonStaticListener(
                common_token_stream=token_stream,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=_class,
                method_name=source_method,
                ref_line=line,
                target_package=target_package)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())
        with open(file, mode='w', encoding='utf8', errors='ignore',
                  newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

    return True
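
# A minimal usage sketch (not part of the original example); the Understand
# database path and the package/class/method names below are placeholders.
if __name__ == '__main__':
    main(
        udb_path="/path/to/project.und",
        source_package="com.example.shapes",
        source_class="Shape",
        method_name="draw",
        target_classes=["Circle"],
    )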