Example #1
def main(udb_path, source_class, field_name):
    # Hard-coded test values from the original script, kept commented out so the
    # function honors its parameters:
    # udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    # source_class = "Website"
    # field_name = "HELLO_FROM_STUDENT_WEBSITE"
    print("Make Field Static")
    main_file = None
    db = und.open(udb_path)
    for cls in db.ents("class"):
        if cls.simplename() == source_class:
            candidate = cls.parent().longname(True)
            if not os.path.isfile(candidate):
                continue
            main_file = candidate
            break
    if main_file is None:
        db.close()
        return

    stream = FileStream(main_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeFieldStaticRefactoringListener(common_token_stream=token_stream, source_class=source_class,
                                                     field_name=field_name)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
    db.close()
Example #2
def main():
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    source_class = "App"
    method_name = "testMethod"
    # initialize with understand
    main_file = ""
    db = und.open(udb_path)
    for cls in db.ents("class"):
        if cls.simplename() == source_class:
            main_file = cls.parent().longname()

    stream = FileStream(main_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = IncreaseMethodVisibilityRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        method_name=method_name)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #3
def main(args, i):
    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8')
    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    # my_listener = EncapsulateFiledRefactoringListener(common_token_stream=token_stream, class_identifier='A')
    my_listener = SingletonRefactoringListener(
        common_token_stream=token_stream, class_identifier='GeneralPurposeBit')
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    newpath = "Refactored" + args.file
    if not os.path.exists(os.path.dirname(newpath)):
        try:
            os.makedirs(os.path.dirname(newpath))
        except OSError as exc:  # Guard against race condition
            pass
    with open(newpath, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
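All of the main(args) drivers in these examples expect an args object that exposes the input path as args.file. A minimal sketch of the argparse entry point they assume is shown below; the flag name and default path are hypothetical and not taken from the original project.

import argparse

if __name__ == '__main__':
    # Hypothetical CLI wrapper: the only requirement is that the parsed
    # namespace exposes args.file, which the main() functions above read.
    arg_parser = argparse.ArgumentParser(description='Run an ANTLR-based Java refactoring')
    arg_parser.add_argument('-f', '--file',
                            help='path of the Java source file to refactor',
                            default='FactoryExample.java')  # hypothetical default
    args = arg_parser.parse_args()
    main(args, 0)  # Example #3's main() also takes an (unused) second argument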
Example #4
def main(args):
    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # input_stream = StdinStream()

    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    my_listener = FactoryMethodRefactoringListener(
        common_token_stream=token_stream,
        creator_identifier='FactoryMethod',
        products_identifier=['JpegReader', 'GifReader'])
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open('../tests/factory1/FactoryExample.refactored.java',
              mode='w',
              newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #5
def main(args):
    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8')
    # input_stream = StdinStream()

    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()

    print("=====Enter Create ParseTree=====")
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    print("=====Create ParseTree Finished=====")

    # Step 6: Create an instance of AssignmentStListener
    my_listener = RemoveFieldRefactoringListener(common_token_stream=token_stream, class_identifier='User',
                                                 fieldname='test_var', filename=args.file)

    # return
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(args.file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #6
    def extract_subclass(self):
        # udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
        # udb_path=create_understand_database("C:\\Users\\asus\\Desktop\\test_project")
        # source_class = "GodClass"
        # moved_methods = ['method1', 'method3', ]
        # moved_fields = ['field1', 'field2', ]
        udb_path = "C:\\Users\\asus\\Desktop\\test_project\\test_project.udb"
        source_class = "CDL"
        moved_methods = [
            'getValue',
            'rowToJSONArray',
            'getVal',
        ]
        moved_fields = [
            'number',
            'number_2',
            'number_1',
        ]

        # initialize with understand
        father_path_file = ""
        file_list_to_be_propagate = set()
        propagate_classes = set()

        db = und.open(udb_path)
        # db=open(udb_path)

        for cls in db.ents("class"):
            if (cls.simplename() == source_class):
                father_path_file = cls.parent().longname()
                for ref in cls.refs("Coupleby"):
                    # print(ref.ent().longname())
                    propagate_classes.add(ref.ent().longname())
                    # print(ref.ent().parent().relname())
                    # file_list_to_be_propagate.add(ref.ent().parent().relname())
            # if(cls.longname()==fatherclass):
            #     print(cls.parent().relname())
            #     father_path_file=cls.parent().relname()

        father_path_file = "C:\\Users\\asus\\Desktop\\test_project\\CDL.java"
        father_path_directory = "C:\\Users\\asus\\Desktop\\test_project"

        stream = FileStream(father_path_file, encoding='utf8')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = ExtractSubClassRefactoringListener(
            common_token_stream=token_stream,
            source_class=source_class,
            new_class=source_class + "extracted",
            moved_fields=moved_fields,
            moved_methods=moved_methods,
            output_path=father_path_directory)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)

        with open(father_path_file, mode='w', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #7
def main(udb_path, source_class, field_name, *args, **kwargs):
    main_file = None
    db = und.open(udb_path)
    for cls in db.ents("class"):
        if cls.simplename() == source_class:
            candidate = cls.parent().longname(True)
            if not os.path.isfile(candidate):
                continue
            main_file = candidate
            break
    if main_file is None:
        db.close()
        return

    stream = FileStream(main_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeFieldNonStaticRefactoringListener(common_token_stream=token_stream, source_class=source_class,
                                                        field_name=field_name)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
    db.close()
Example #8
    def do_refactor(self):
        db = und.open(self.udb_path)
        self.get_source_class_map(db=db)
        listener = ExtractClassRefactoringListener(
            common_token_stream=self.token_stream,
            new_class=self.new_class,
            source_class=self.source_class,
            moved_fields=self.moved_fields,
            moved_methods=self.moved_methods,
            method_map=self.method_usage_map)
        self.object_name = listener.object_name
        self.walker.walk(listener=listener, t=self.tree)

        # Find Field and Method Usages
        field_usages = []

        for field in self.moved_fields:
            for ent in db.lookup(f"{self.source_class}.{field}"):
                # print(ent.name(), "  [", ent.kindname(), "]", sep="", end="\n")
                for ref in ent.refs("useBy, setBy, modifyBy"):
                    if Path(ref.file().longname()) == Path(self.file_path):
                        continue
                    field_usage = {
                        'field_name': field,
                        'file_path': ref.file().longname()
                    }
                    if field_usage not in field_usages:
                        field_usages.append(field_usage)

        # print(listener.token_stream_rewriter.getDefaultText())
        # print("=" * 25)
        # print(listener.code)
        stream = InputStream(listener.code)
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = NewClassPropagation(common_token_stream=token_stream,
                                          method_map=self.method_usage_map,
                                          source_class=self.source_class,
                                          moved_fields=self.moved_fields)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())

        # Write Changes
        with open(self.file_path, 'w') as f:
            f.write(listener.token_stream_rewriter.getDefaultText())

        with open(self.new_file_path, 'w') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

        # Propagate and reformat
        self.propagate_fields(field_usages)
        self.reformat(self.file_path)
        self.reformat(self.new_file_path)
        db.close()
Example #9
class RemoveFlagArgument:
    """Refactoring API for performing the Remove Flag Argument refactoring."""
    def __init__(self,
                 source_class="Playground",
                 source_method="DeliveryDate",
                 argument_name="b",
                 main_file="playground.java"):
        """create a removeflagargument refactor 

        Args:
            source_class (str): class name contaminated by code smell.
            source_mathod (str): method name contaminated.
            argument_name (str): boolean argument in method.
            main_file (str): path of main file containing source class.
        """

        self.source_class = source_class
        self.source_method = source_method
        self.argument_name = argument_name
        self.main_file = main_file

        self.stream = FileStream(self.main_file,
                                 encoding='utf8',
                                 errors='ignore')
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.parser.getTokenStream()
        self.parse_tree = self.parser.compilationUnit()
        self.my_listener = RemoveFlagArgumentListener(
            common_token_stream=self.token_stream,
            source_class=self.source_class,
            source_method=self.source_method,
            argument_name=self.argument_name)

    def do_refactor(self):
        """removes flag argument logic and replace it by two method call of the new method generated from extracted
            login in if else block
        """
        walker = ParseTreeWalker()
        walker.walk(t=self.parse_tree, listener=self.my_listener)

        # self.my_listener.body_1

        with open(self.main_file, 'w') as f:
            f.write(self.my_listener.token_stream_rewriter.getDefaultText())
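A minimal usage sketch for the RemoveFlagArgument API above; the values are simply the constructor defaults, so treat them as placeholders for a real project.

# Usage sketch (assumes playground.java exists next to the script).
refactoring = RemoveFlagArgument(source_class='Playground',
                                 source_method='DeliveryDate',
                                 argument_name='b',
                                 main_file='playground.java')
refactoring.do_refactor()  # rewrites playground.java in place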
Example #10
def main():
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    source_class = "GodClass"
    moved_methods = [
        'method1',
        'method3',
    ]
    moved_fields = [
        'field1',
        'field2',
    ]

    # initialize with understand
    father_path_file = ""
    file_list_to_be_propagate = set()
    propagate_classes = set()

    db = und.open(udb_path)

    for cls in db.ents("class"):
        if (cls.simplename() == source_class):
            father_path_file = cls.parent().longname()
            for ref in cls.refs("Coupleby"):
                # print(ref.ent().longname())
                propagate_classes.add(ref.ent().longname())
                # print(ref.ent().parent().relname())
                # file_list_to_be_propagate.add(ref.ent().parent().relname())
        # if(cls.longname()==fatherclass):
        #     print(cls.parent().relname())
        #     father_path_file=cls.parent().relname()

    stream = FileStream(father_path_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = ExtractSubClassRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        new_class=source_class + "extracted",
        moved_fields=moved_fields,
        moved_methods=moved_methods)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(father_path_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #11
def main(udb_path, source_class, method_name, *args, **kwargs):
    """
    The main API for the make-method-static refactoring operation.
    """

    main_file = None
    db = und.open(udb_path)
    classes = db.ents("Class")
    for cls in classes:
        if cls.simplename() == source_class:
            if cls.parent() is not None:
                temp_file = str(cls.parent().longname(True))
                if os.path.isfile(temp_file):
                    main_file = temp_file
                    break

    if main_file is None:
        db.close()
        return False

    db.close()

    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeMethodStaticRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        method_name=method_name)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file,
              mode='w',
              encoding='utf8',
              errors='ignore',
              newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    return True
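A hedged example of calling this make-method-static API; the .udb path, class, and method names below are borrowed from the other examples and must point at a real Understand database.

# Illustrative invocation; returns True on success, False if the class is not found.
applied = main(
    udb_path='/home/ali/Desktop/code/TestProject/TestProject.udb',
    source_class='App',
    method_name='testMethod',
)
print('Refactoring applied:', applied)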
Example #12
def get_parse_tree_token_stream(args):
    """
    returns parse tree and token stream base on the file stream
    :param args: file arguments
    """

    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8')
    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()

    return parse_tree, token_stream
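This helper produces the same two objects that the other examples build by hand, so it can replace their boilerplate. The sketch below wires its output to a listener; the SingletonRefactoringListener call mirrors Example #3 and is illustrative only.

parse_tree, token_stream = get_parse_tree_token_stream(args)
listener = SingletonRefactoringListener(common_token_stream=token_stream,
                                        class_identifier='GeneralPurposeBit')
walker = ParseTreeWalker()
walker.walk(t=parse_tree, listener=listener)
with open(args.file, mode='w', newline='') as f:
    f.write(listener.token_stream_rewriter.getDefaultText())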
Example #13
def main(args):
    # Step 1: Load input source into stream
    begin_time = time()
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    # input_stream = StdinStream()
    print('Input stream:')
    print(stream)

    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    my_listener = VisitorPatternRefactoringListener(
        common_token_stream=token_stream,
        SuperClass_identifier='SC',
        SubClass_identifier=['CC1', 'CC2', 'CC3'])
    #                                                    SuperClass_identifier='ComputerPart',
    #                                                    SubClass_identifier=['Keyboard', 'Monitor', 'Mouse', 'Computer'])
    #                                                    SuperClass_identifier='Shape',
    #                                                    SubClass_identifier=['Polygon', 'Rectangle','Arrow'])

    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    print('Compiler result:')
    print(my_listener.token_stream_rewriter.getDefaultText())

    with open('../tests/visitor1/VisitorExample0.refactored.java',
              mode='w',
              newline='') as f:
        #   with open('VisitorExample1.refactored.java', mode='w', newline='') as f:
        #    with open('VisitorExample2.refactored.java', mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    end_time = time()
    print("time execution : ", end_time - begin_time)
Example #14
    def add_implement_statement_to_class(self, ):
        stream = FileStream(self.class_path, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        listener = AddingImplementStatementToClass(
            common_token_stream=token_stream,
            class_name=os.path.splitext(os.path.basename(self.class_path))[0],
            interface_package=self.interface_info['package'],
            interface_name=self.interface_info['name'])
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=listener)

        with open(self.class_path,
                  encoding='utf8',
                  errors='ignore',
                  mode='w',
                  newline='') as f:
            f.write(listener.token_stream_rewriter.getDefaultText())
Example #15
def main(udb_path, target_class, target_methods):
    main_file = ""
    db = understand.open(udb_path)
    for cls in db.ents("class"):
        if cls.simplename() == target_class:
            main_file = cls.parent().longname()

    stream = FileStream(main_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeMethodNonStaticRefactoringListener(
        common_token_stream=token_stream,
        target_class=target_class,
        target_methods=target_methods)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #16
def main(args):
    # Step 1: Load input source into stream
    begin_time = time()
    stream = FileStream(args.file, encoding='utf8')
    # input_stream = StdinStream()
    print('Input stream:')
    print(stream)

    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of the refactoringListener, and send as a parameter the list of tokens to the class
    my_listener = StrategyPatternRefactoringListener(
        common_token_stream=token_stream, method_identifier='execute')
    #                                                     method_identifier='read')
    #                                                     method_identifier='write')

    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    print('Compiler result:')
    print(my_listener.token_stream_rewriter.getDefaultText())

    with open('StrategyExample0.refactored.java', mode='w', newline='') as f:
        #    with open('StrategyExample1.refactored.java', mode='w', newline='') as f:
        #    with open('StrategyExample2.refactored.java', mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    end_time = time()
    print("execute time : ", end_time - begin_time)
Example #17
def main(udb_path: str, children_classes: list, method_name: str, *args,
         **kwargs):
    """


    """

    if len(children_classes) <= 1:
        logger.error("len(children_classes) should be gte 2")
        return False

    # Initialize with understand
    destination_class = ""
    fileslist_to_be_rafeactored = set()
    fileslist_to_be_propagate = set()
    propagation_classes = set()

    db = und.open(udb_path)
    try:
        method_ents = [
            db.lookup(i + "." + method_name, "method")[0]
            for i in children_classes
        ]
    except IndexError:
        # print([db.lookup(i + "." + method_name, "method") for i in children_classes])
        logger.error(
            f"Method {method_name} does not exists in all children_classes.")
        db.close()
        return False

    # Get method text
    method_text = method_ents[0].contents().strip()

    for method_ent in method_ents:
        if method_ent.contents().strip() != method_text:
            logger.error("Method content is different.")
            db.close()
            return False

        for ref in method_ent.refs("Use,Call"):
            if ref.ent().parent() is not None:
                if ref.ent().parent().simplename() in children_classes:
                    logger.error("Method has internal dependencies.")
                    db.close()
                    return False

    for mth in db.ents("Java Method"):
        for child in children_classes:
            if mth.longname().endswith(child + "." + method_name):
                fileslist_to_be_rafeactored.add(
                    mth.parent().parent().longname())
                for fth in mth.parent().refs("Extend"):
                    destination_class = fth.ent().longname()
                    fileslist_to_be_rafeactored.add(
                        fth.ent().parent().longname())
                for ref in mth.refs("Java Callby"):
                    propagation_classes.add(ref.ent().parent().longname())
                    fileslist_to_be_propagate.add(
                        ref.ent().parent().parent().longname())

    db.close()

    # print("=========================================")
    # print("fileslist_to_be_propagate :", fileslist_to_be_propagate)
    # print("propagation_classes : ", propagation_classes)
    # print("fileslist_to_be_rafeactored :", fileslist_to_be_rafeactored)
    # print("father class :", destination_class)

    fileslist_to_be_rafeactored = list(fileslist_to_be_rafeactored)
    fileslist_to_be_propagate = list(fileslist_to_be_propagate)
    propagation_class = list(propagation_classes)

    # refactored start
    for file in fileslist_to_be_rafeactored:
        try:
            stream = FileStream(file, encoding='utf-8', errors='ignore')
        except:
            continue
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_refactor = PullUpMethodRefactoringListener(
            common_token_stream=token_stream,
            destination_class=destination_class,
            children_class=children_classes,
            moved_methods=method_name,
            method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_refactor)

        with open(file, mode='w', encoding='utf-8', newline='') as f:
            f.write(
                my_listener_refactor.token_stream_rewriter.getDefaultText())
    # end refactoring

    # beginning of propagate
    for file in fileslist_to_be_propagate:
        if not os.path.exists(file):
            continue
        stream = FileStream(file, encoding='utf-8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_propagate = PropagationPullUpMethodRefactoringListener(
            token_stream_rewriter=token_stream,
            old_class_name=children_classes,
            new_class_name=destination_class,
            propagated_class_name=propagation_class)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)

        with open(file, mode='w', encoding='utf8', errors='ignore',
                  newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())
    # end of propagate

    return True
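A hedged call of the pull-up-method API above; the database path is reused from the other examples, while the children class and method names are hypothetical placeholders.

# Illustrative invocation: pull testMethod from two sibling classes up to their parent.
ok = main(
    udb_path='/home/ali/Desktop/code/TestProject/TestProject.udb',
    children_classes=['CircleShape', 'SquareShape'],  # hypothetical subclasses of a common parent
    method_name='testMethod',
)
print('Pull up method succeeded:', ok)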
Example #18
def main(udb_path, source_package, source_class, method_name,
         target_classes: list, *args, **kwargs):
    """

    The main API for the push-down method refactoring operation

    """
    target_package = source_package
    source_method = method_name

    main_file = None
    source_method_entity = None
    is_static = False
    propagation_files = []
    propagation_classes = []
    propagation_lines = []
    children_classes = []
    children_files = []

    # Initialize with understand
    db = und.open(udb_path)
    methods = db.ents("Java Method")
    for mth in methods:
        if mth.longname(
        ) == source_package + "." + source_class + "." + source_method:
            source_method_entity = mth
            for child_ref in mth.parent().refs("Extendby"):
                child_ref = child_ref.ent()
                if child_ref.simplename() in target_classes:
                    children_classes.append(child_ref.simplename())
                    children_files.append(child_ref.parent().longname())
            # print("mainfile : ", mth.parent().parent().longname())
            is_static = mth.kind().check("static")
            main_file = mth.parent().parent().longname()
            for ref in mth.refs("Callby"):
                propagation_files.append(
                    ref.ent().parent().parent().longname())
                propagation_classes.append(ref.ent().parent().simplename())
                propagation_lines.append(ref.line())

    # Check pre-condition
    if not len(target_classes) == 1:
        logger.error(f"len(target_classes) is not 1.")
        db.close()
        return False

    if not len(children_classes) == 1:
        logger.error(f"len(children_classes) is not 1.")
        db.close()
        return False

    if not len(children_files) == 1:
        logger.error(f"len(children_files) is not 1.")
        db.close()
        return False

    for mth in methods:
        if mth.simplename() == source_method:
            if mth.parent().simplename() in target_classes:
                if mth.type() == source_method_entity.type():
                    if mth.kind() == source_method_entity.kind():
                        if mth.parameters() == source_method_entity.parameters(
                        ):
                            logger.error("Duplicated method")
                            db.close()
                            return False

    for ref in source_method_entity.refs("use, call"):
        ref_ent = ref.ent()
        is_public = ref_ent.kind().check("public")
        if not is_public:
            logger.error("Has internal dependencies.")
            db.close()
            return False

    #  get text
    method_text = source_method_entity.contents()

    db.close()

    # Delete source method
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = DeleteSourceListener(common_token_stream=token_stream,
                                       source_method=source_method)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    # print(my_listener.token_stream_rewriter.getDefaultText())
    with open(main_file, mode='w', encoding='utf-8', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    # Do the push down
    for child_file, child_class in zip(children_files, children_classes):
        stream = FileStream(child_file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = PushDownMethodRefactoringListener(
            common_token_stream=token_stream,
            source_class=child_class,
            source_method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())
        with open(child_file, mode='w', encoding='utf8', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

    # Propagation
    for file, _class, line in zip(propagation_files, propagation_classes,
                                  propagation_lines):
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        if is_static:
            my_listener = PropagationStaticListener(
                common_token_stream=token_stream,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=_class,
                method_name=source_method,
                ref_line=line,
                target_package=target_package)
        else:
            my_listener = PropagationNonStaticListener(
                common_token_stream=token_stream,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=_class,
                method_name=source_method,
                ref_line=line,
                target_package=target_package)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())
        with open(file, mode='w', encoding='utf8', errors='ignore',
                  newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

    return True
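A matching hedged call for this push-down API; the package and target class names are hypothetical placeholders, and target_classes must contain exactly one entry to satisfy the precondition checked above.

# Illustrative invocation: push testMethod from Shape down into a single subclass.
ok = main(
    udb_path='/home/ali/Desktop/code/TestProject/TestProject.udb',
    source_package='com.example.shapes',  # hypothetical package
    source_class='Shape',
    method_name='testMethod',
    target_classes=['CircleShape'],  # hypothetical child class; exactly one is required
)
print('Push down method succeeded:', ok)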
Example #19
def main(args):
    files = get_file_dirs(args.dir)

    create_new_project_dir('JavaProjectRefactored', files)
    ref = input(" choose your refactoring :")

    for file in files:

        # Step 1: Load input source into stream

        m = re.search(r'^.*\.java$', file)
        if m is None:
            continue

        print(file)

        stream = FileStream(file, encoding='utf8')
        # input_stream = StdinStream()

        # Step 2: Create an instance of AssignmentStLexer
        lexer = JavaLexer(stream)
        # Step 3: Convert the input source into a list of tokens
        token_stream = CommonTokenStream(lexer)
        # Step 4: Create an instance of the AssignmentStParser
        parser = JavaParserLabeled(token_stream)

        parser.getTokenStream()
        # Step 5: Create parse tree
        parse_tree = parser.compilationUnit()
        # Step 6: Create an instance of AssignmentStListener

        # my_listener = RenameClassRefactoringListener(common_token_stream=token_stream, class_new_name='Z',
        #                                                 class_identifier='A', package_identifier="Dummy")
        if ref == "Rename":
            print("Rename class  =>")
            my_listener = RenameClassRefactoringListener(common_token_stream=token_stream, class_new_name='Z',
                                                         class_identifier='ReflectiveXmlRpcMetaDataHandler',
                                                         package_identifier="org.apache.xmlrpc.metadata")
        elif ref == "Static":
            print("Make field static  =>")
            my_listener = MakeFieldStaticRefactoringListener(common_token_stream=token_stream,
                                                             field_identifier='methodHelp',
                                                             class_identifier='ReflectiveXmlRpcMetaDataHandler',
                                                             package_identifier="org.apache.xmlrpc.metadata")
        elif ref == "Non-Static":
            print("Make field Non static  =>")
            my_listener = MakeFieldNonStaticRefactoringListener(common_token_stream=token_stream,
                                                                field_identifier='log',
                                                                class_identifier='XmlRpcErrorLogger',
                                                                package_identifier="org.apache.xmlrpc.server")
        else:
            print("Not Valid")
            break

        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)

        if ref == "Non Static" and my_listener.canceled:
            rewrite_project(files, 'JavaProjectRefactored')
            break

        if ref == "Rename" and my_listener.in_selected_class:
            splited_dir = file.split('/')
            splited_dir[0] = 'JavaProjectRefactored'
            if os.path.exists("/".join(splited_dir)):
                os.remove("/".join(splited_dir))
            splited_dir[-1] = my_listener.class_new_name + ".java"
            with open("/".join(splited_dir), mode='w', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
        else:
            splited_dir = file.split('/')
            splited_dir[0] = 'JavaProjectRefactored'
            with open("/".join(splited_dir), mode='w', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
Example #20
def main(udb, child, parent):
    # initialize with understand
    # udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    child_path_file = ""
    father_path_file = ""
    file_list_to_be_propagate = set()
    propagate_classes = set()
    db = und.open(udb)
    for cls in db.ents("class"):
        if cls.simplename() == child:
            child_path_file = cls.parent().longname()
            for ref in cls.refs("Coupleby"):
                propagate_classes.add(ref.ent().longname())
                file_list_to_be_propagate.add(ref.ent().parent().longname())
        if cls.simplename() == parent:
            father_path_file = cls.parent().longname()

    file_list_to_be_propagate = list(file_list_to_be_propagate)
    propagate_classes = list(propagate_classes)

    stream = FileStream(child_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener_field_text = CollapseHierarchyRefactoringGetFieldTextListener(
        common_token_stream=token_stream, child_class=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_field_text)
    field_code = my_listener_field_text.fieldcode

    my_listener_method_text = CollapseHierarchyRefactoringGetMethodTextListener(
        common_token_stream=token_stream, child_class=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_method_text)
    methods_code = my_listener_method_text.methodcode

    # Remove child class
    my_listener_remove_child_class = RemoveClassRefactoringListener(
        common_token_stream=token_stream, class_name=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_remove_child_class)
    with open(child_path_file, mode='w', newline='') as f:
        f.write(my_listener_remove_child_class.token_stream_rewriter.
                getDefaultText())
    # Refactor
    stream = FileStream(father_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener_refactor_action = CollapseHierarchyRefactoringListener(
        common_token_stream=token_stream,
        parent_class=parent,
        child_class=child,
        field_text=field_code,
        method_text=methods_code)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_refactor_action)

    with open(father_path_file, mode='w', newline='') as f:
        f.write(
            my_listener_refactor_action.token_stream_rewriter.getDefaultText())
    # Propagate
    for file in file_list_to_be_propagate:
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_propagate = PropagationCollapseHierarchyListener(
            token_stream_rewriter=token_stream,
            old_class_name=child,
            new_class_name=parent,
            propagated_class_name=propagate_classes)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)

        with open(file, mode='w', newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())
Example #21
                            index=ctx.start.tokenIndex,
                            text=self.object + '.',
                            program_name=self.token_stream_rewriter.
                            DEFAULT_PROGRAM_NAME)


if __name__ == '__main__':
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    source_class = "Shape"
    # initialize with understand
    main_file = ""
    db = und.open(udb_path)
    for cls in db.ents("class"):
        if cls.simplename() == source_class:
            main_file = cls.parent().longname()

    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeConcreteClassRefactoringListener(
        common_token_stream=token_stream, class_name=source_class)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
    db.close()
Example #22
def main():
    """
    it builds the parse tree and walk its corresponding walker so that our overridden methods run.
    """

    # udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    # udb_path=create_understand_database("C:\\Users\\asus\\Desktop\\test_project")
    # source_class = "GodClass"
    # moved_methods = ['method1', 'method3', ]
    # moved_fields = ['field1', 'field2', ]
    udb_path = "C:\\Users\\asus\\Desktop\\test_project\\test_project.udb"
    # moved_methods = ['getValue', 'rowToJSONArray', 'getVal', ]
    # moved_fields = ['number_2', 'number_1', ]

    source_class = "GodClass"
    moved_methods = ['method1', 'method3']
    moved_fields = ['field1', 'field2']
    father_path_file = "/data/Dev/JavaSample/src/GodClass.java"
    father_path_directory = "/data/Dev/JavaSample/src"
    path_to_refactor = "/data/Dev/JavaSample/src"
    new_class_file = "/data/Dev/JavaSample/src/GodSubClass.java"

    # source_class = "TaskNode"
    # moved_methods = ['getUserObject']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject\\ganttproject\\src\\main\\java\\net\\sourceforge\\ganttproject\\task\\TaskNode.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject\\ganttproject\\src\\main\\java\\net\\sourceforge\\ganttproject\\task"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\ganttproject\\ganttproject\\src\\main\\java\\net\\sourceforge\\ganttproject\\task\\TaskNodeextracted.java"

    # source_class = "SecuritySupport"
    # moved_methods = ['getSystemProperty']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\html\\dom\\SecuritySupport.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\html\\dom"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\html\\dom\\SecuritySupportextracted.java"

    # source_class = "BaseMarkupSerializer"
    # moved_methods = ['setOutputCharStream']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\xml\\serialize\\BaseMarkupSerializer.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\xml\\serialize"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\xerces2-j\\src\\org\\apache\\xml\\serialize\\BaseMarkupSerializerextracted.java"

    # source_class = "Piece"
    # moved_methods = ['setX']
    # moved_fields = []
    # father_path_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master\\src\\game\\Piece.java"
    # father_path_directory = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master\\src\\game"
    # path_to_refactor = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master"
    # new_class_file = "C:\\Users\\asus\\Desktop\\benchmark_projects\\Chess_master\\src\\game\\Pieceextracted.java"

    stream = FileStream(father_path_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = ExtractSubClassRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        new_class=source_class + "extracted",
        moved_fields=moved_fields,
        moved_methods=moved_methods,
        output_path=father_path_directory)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(father_path_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    extractJavaFilesAndProcess(path_to_refactor, father_path_file,
                               new_class_file)

    for file in files_to_refactor:
        stream = FileStream(file, encoding='utf8')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()

        my_listener = FindUsagesListener(common_token_stream=token_stream,
                                         source_class=source_class,
                                         new_class=source_class + "extracted",
                                         moved_fields=moved_fields,
                                         moved_methods=moved_methods,
                                         output_path=father_path_directory)

        # output_path=father_path_directory)

        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)

        tmp_aul = my_listener.aul

        with open(file, mode='w', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

        # after find usages

        try:
            stream = FileStream(file, encoding='utf8')
            lexer = JavaLexer(stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            parser.getTokenStream()
            parse_tree = parser.compilationUnit()

            my_listener = PropagationListener(
                common_token_stream=token_stream,
                source_class=source_class,
                new_class=source_class + "extracted",
                moved_fields=moved_fields,
                moved_methods=moved_methods,
                output_path=father_path_directory,
                aul=tmp_aul)

            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)

            with open(file, mode='w', newline='') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
        except:
            print("not utf8")
Example #23
def main(args):
    global f_iteration_flag

    # Step 1: Load input source into stream
    stream = FileStream(args.file, encoding='utf8')
    # Step 2: Create an instance of AssignmentStLexer
    lexer = JavaLexer(stream)
    # Step 3: Convert the input source into a list of tokens
    token_stream = CommonTokenStream(lexer)
    # Step 4: Create an instance of the AssignmentStParser
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    # Step 5: Create parse tree
    parse_tree = parser.compilationUnit()
    # Step 6: Create an instance of AssignmentStListener
    if f_iteration_flag:
        my_listener = MoveClassRefactoringListener(
            common_token_stream=token_stream,
            source_package=source_package,
            target_package=target_package,
            class_identifier=class_identifier,
            filename=args.file,
            dirname=directory)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)

        with open(args.file, mode='w', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText().replace(
                "\r", ""))

    else:
        has_import = False
        has_exact_import = False

        with open(file=args.file, mode='r') as file_to_check:
            for line in file_to_check.readlines():
                text_line = line.replace('\n', '').replace('\r', '').strip()
                if text_line.startswith('import') and text_line.endswith(
                        source_package + '.' + class_identifier + ';'):
                    has_import = True
                    break
                if text_line.startswith('import') and text_line.endswith(
                        target_package + '.' + class_identifier + ';'):
                    has_exact_import = True
                    break

        if not has_exact_import:
            print(
                f"Start checking file \"{file_to_check.name}\" *** {file_counter}/100"
            )

            my_listener = ReplaceDependentObjectsListener(
                common_token_stream=token_stream,
                source_package=source_package,
                target_package=target_package,
                class_identifier=class_identifier,
                filename=args.file,
                has_import=has_import)
            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)

            with open(args.file, mode='w', newline='') as f:
                f.write(
                    my_listener.token_stream_rewriter.getDefaultText().replace(
                        "\r", ""))

            print(
                f"Finish checking file \"{file_to_check.name}\" *** {file_counter}/100"
            )
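This driver reads several module-level globals (f_iteration_flag, source_package, target_package, class_identifier, directory, file_counter) that are defined elsewhere in the original script. A hedged sketch of the setup it assumes is given below; every value is hypothetical.

# Hypothetical module-level configuration assumed by main() above.
f_iteration_flag = True                   # True: move the class itself; False: update dependent files
source_package = 'com.example.source'     # hypothetical source package
target_package = 'com.example.target'     # hypothetical target package
class_identifier = 'A'                    # class to move (identifier borrowed from Example #3's comments)
directory = 'JavaProject/src'             # hypothetical project directory passed as dirname
file_counter = 1                          # progress counter used only in the log messages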