def main(udb_path, source_class, method_name):
    """Make a method non-static.

    Locates the file that defines *source_class* via the Understand
    database, then rewrites it in place with
    MakeMethodNonStaticRefactoringListener.

    Args:
        udb_path (str): path to the Understand ``.udb`` database.
        source_class (str): simple name of the class owning the method.
        method_name (str): name of the static method to make non-static.
    """
    print("Make Method Non Static")
    # fix: was initialized to "" so the `is None` guard below could never
    # fire and FileStream("") would crash when the class was not found
    main_file = None
    db = und.open(udb_path)
    try:
        for cls in db.ents("class"):
            if cls.simplename() == source_class:
                candidate = cls.parent().longname(True)
                if os.path.isfile(candidate):
                    main_file = candidate
                    break
    finally:
        # fix: the Understand database handle was never closed
        db.close()
    if main_file is None:
        return

    stream = FileStream(main_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parse_tree = parser.compilationUnit()
    my_listener = MakeMethodNonStaticRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        method_name=method_name)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
def main():
    """Rename-field refactoring test driver.

    Renames field ``mContext`` of class
    ``SuggestedRoomsByFollowingsListViewAdapter`` to ``field_New`` and
    writes the refactored source next to the input file.
    """
    # fix: local was named `Path`, shadowing the common pathlib import
    test_dir = "../tests/rename_tests/"
    rename_method_test_file = FileStream(str(test_dir + "rename_field_test.java"))
    print("file opened")
    lexer = JavaLexer(rename_method_test_file)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    tree = parser.compilationUnit()

    # First pass: collect object information for the target class.
    find_objects = FindObjects("SuggestedRoomsByFollowingsListViewAdapter")
    walker = ParseTreeWalker()
    walker.walk(find_objects, tree)

    # Second pass: perform the actual field rename.
    listener_for_rename = RenameFieldRefactoringListener(
        token_stream, "SuggestedRoomsByFollowingsListViewAdapter",
        "mContext", "field_New")
    walker = ParseTreeWalker()
    walker.walk(listener_for_rename, tree)

    # fix: output handle was previously opened early and never closed
    with open(os.path.join(test_dir, "rename_field_test_Refactored.java"),
              'w', newline='') as refactored:
        refactored.write(
            listener_for_rename.token_stream_rewriter.getDefaultText())
    print("tamam shod")
def main(args, i):
    """Apply the Singleton refactoring to ``args.file``.

    Writes the refactored source to ``Refactored<args.file>``, creating
    parent directories as needed.

    Args:
        args: parsed CLI arguments; ``args.file`` is the input Java file.
        i: unused; kept for interface compatibility with callers.
    """
    # Build the token stream and parse tree from the input source.
    stream = FileStream(args.file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parse_tree = parser.compilationUnit()

    # Walk the tree with the Singleton refactoring listener.
    my_listener = SingletonRefactoringListener(
        common_token_stream=token_stream,
        class_identifier='GeneralPurposeBit')
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    newpath = "Refactored" + args.file
    parent_dir = os.path.dirname(newpath)
    # fix: makedirs(exist_ok=True) is race-free and replaces the broad
    # `except OSError: pass`, which also hid genuine failures (e.g. EACCES)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)
    with open(newpath, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
class ReplaceParameterWithQueryAPI:
    """API wrapper for the replace-parameter-with-query refactoring.

    The target file is parsed once at construction time; call
    :meth:`do_refactor` to run the listener and overwrite the file.
    """

    def __init__(self, file_path, target_class, target_method,
                 target_parameters):
        self.file_path = file_path
        self.new_file_path = file_path
        self.target_class = target_class
        self.target_method = target_method
        self.target_parameters = target_parameters
        # Build the full ANTLR pipeline up front so do_refactor only walks.
        self.stream = FileStream(self.file_path, encoding="utf8")
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.tree = self.parser.compilationUnit()
        self.walker = ParseTreeWalker()

    def do_refactor(self):
        """Run the refactoring listener and save the rewritten source."""
        listener = ReplaceParameterWithQueryListener(
            common_token_stream=self.token_stream,
            target_class=self.target_class,
            target_method=self.target_method,
            target_parameters=self.target_parameters)
        self.walker.walk(listener=listener, t=self.tree)
        rewritten = listener.token_stream_rewriter.getDefaultText()
        print(listener.add_to_target_method)
        print(rewritten)
        with open(self.new_file_path, mode="w", newline="") as output_file:
            output_file.write(rewritten)
def extract_subclass(self):
    """Extract a subclass from the hard-coded source class ``CDL``.

    Moves the listed methods and fields into a new class named
    ``<source_class>extracted`` written to the project directory, and
    rewrites the original file in place.

    NOTE(review): udb path, class name, member lists and file paths are
    hard coded to a local Windows test project; parameterize before use.
    """
    udb_path = "C:\\Users\\asus\\Desktop\\test_project\\test_project.udb"
    source_class = "CDL"
    moved_methods = ['getValue', 'rowToJSONArray', 'getVal', ]
    moved_fields = ['number', 'number_2', 'number_1', ]

    # Initialize with Understand.
    father_path_file = ""
    file_list_to_be_propagate = set()
    propagate_classes = set()
    db = und.open(udb_path)
    try:
        for cls in db.ents("class"):
            if cls.simplename() == source_class:
                father_path_file = cls.parent().longname()
                for ref in cls.refs("Coupleby"):
                    propagate_classes.add(ref.ent().longname())
    finally:
        # fix: the Understand database handle was previously never closed
        db.close()

    # NOTE(review): the path discovered above is immediately overridden
    # by a hard-coded path, so the Understand lookup result is unused.
    father_path_file = "C:\\Users\\asus\\Desktop\\test_project\\CDL.java"
    father_path_directory = "C:\\Users\\asus\\Desktop\\test_project"

    stream = FileStream(father_path_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parse_tree = parser.compilationUnit()
    my_listener = ExtractSubClassRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        new_class=source_class + "extracted",
        moved_fields=moved_fields,
        moved_methods=moved_methods,
        output_path=father_path_directory)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)

    with open(father_path_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
def do_refactor(self):
    """Perform the extract-class refactoring.

    Walks the source class with ExtractClassRefactoringListener, collects
    usages of the moved fields in other files, writes the rewritten source
    class and the newly extracted class, then propagates field usages and
    reformats both output files.
    """
    db = und.open(self.udb_path)
    # Build the method-usage map for the source class before walking.
    self.get_source_class_map(db=db)
    listener = ExtractClassRefactoringListener(
        common_token_stream=self.token_stream,
        new_class=self.new_class,
        source_class=self.source_class,
        moved_fields=self.moved_fields,
        moved_methods=self.moved_methods,
        method_map=self.method_usage_map)
    # NOTE(review): object_name is read *before* the walk — presumably
    # the listener fixes it at construction time; confirm.
    self.object_name = listener.object_name
    self.walker.walk(listener=listener, t=self.tree)
    # Find usages of the moved fields in files other than the one being
    # refactored; each usage is recorded once per (field, file) pair.
    field_usages = []
    for field in self.moved_fields:
        for ent in db.lookup(f"{self.source_class}.{field}"):
            for ref in ent.refs("useBy, setBy, modifyBy"):
                # Skip references inside the file being refactored.
                if Path(ref.file().longname()) == Path(self.file_path):
                    continue
                field_usage = {
                    'field_name': field,
                    'file_path': ref.file().longname()
                }
                if field_usage not in field_usages:
                    field_usages.append(field_usage)
    # Parse the code of the newly extracted class (produced by the first
    # listener) and apply method/field propagation to it.
    stream = InputStream(listener.code)
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = NewClassPropagation(common_token_stream=token_stream,
                                      method_map=self.method_usage_map,
                                      source_class=self.source_class,
                                      moved_fields=self.moved_fields)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    # Write changes: the rewritten source class and the new class file.
    with open(self.file_path, 'w') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())
    with open(self.new_file_path, 'w') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
    # Propagate field usages into other files, then reformat outputs.
    self.propagate_fields(field_usages)
    self.reformat(self.file_path)
    self.reformat(self.new_file_path)
    db.close()
def main():
    """Rename class ``CDL`` to ``test`` in every Java file of the
    benchmark folder.

    Outputs go to ``refactoredFiles/`` (previous outputs are deleted
    first); files whose public class name changed are then renamed.

    NOTE(review): relies on module-level ``old_names``/``new_names``
    lists populated elsewhere by the rename listener — confirm.
    """
    # fix: local was named `Path`, shadowing the common pathlib import
    path = "../tests/rename_tests/benchmark"
    package_name = "org.json"
    class_identifier = "CDL"
    new_class_name = "test"
    folder_path = os.listdir(path)
    tests_path = os.listdir(path + "/refactoredFiles/")

    # Delete last refactored files.
    for t in tests_path:
        os.remove(os.path.join(path + "/refactoredFiles/", t))

    for file in folder_path:
        # We have all of the java files in this folder now.
        if file.endswith('.java'):
            each_file_path = path + "/" + file
            print(" ****************" + " in file : " + file +
                  " ****************")
            each_file = FileStream(str(each_file_path))
            file_name = file.split(".")[0]
            lexer = JavaLexer(each_file)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            tree = parser.compilationUnit()
            listener = RenameClassRefactoringListener(
                token_stream, package_name, class_identifier,
                new_class_name)
            walker = ParseTreeWalker()
            walker.walk(listener, tree)
            # fix: the output handle was never closed, so the os.rename
            # pass below could observe an unflushed file
            with open(path + "/refactoredFiles/" + file_name +
                      "_Refactored.java", 'w', newline='') as refactored:
                refactored.write(
                    listener.token_stream_rewriter.getDefaultText())

    print("changing public class files name... ")
    for i in range(len(old_names)):
        os.rename(
            path + "/refactoredFiles/" + old_names[i] + "_Refactored.java",
            path + "/refactoredFiles/" + new_names[i] + "_Refactored.java")
    print(" %%%%%%%%%%%%%" + " all files finished " + "****************")
def extract_method(conf):
    """Extract the configured line range of a method into a new method.

    Parses ``conf['target_file']``, determines (via the listener) which
    lines move into the new method and which remain, then writes a new
    source file with the extracted method inserted after the original
    method body.

    Args:
        conf (dict): keys ``target_file``, ``target_package``,
            ``target_class``, ``target_method``, ``lines``,
            ``new_method_name``, ``output_file``.
    """
    stream = FileStream(conf['target_file'], encoding="utf-8")
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)
    tree = parser.compilationUnit()
    # TODO: too many params for ExtractMethodRefactoring constructor
    listener = ExtractMethodRefactoring(conf['target_package'],
                                        conf['target_class'],
                                        conf['target_method'],
                                        conf['lines'])
    walker = ParseTreeWalker()
    walker.walk(listener=listener, t=tree)

    output = []
    func = []
    # fix: file handles are now closed via context managers (were leaked)
    with open(conf['target_file'], 'r', encoding="utf-8") as source:
        lines = source.readlines()
    # Hoist the loop-invariant first extracted line.
    first_extracted_line = min(listener.lines)
    print('extracting following lines:')
    # fix: idiomatic `enumerate`/`in` replace manual counter and
    # `__contains__` calls
    for line_num, line in enumerate(lines, start=1):
        if line_num in listener.lines:
            print(line, end='')
            if line_num == first_extracted_line:
                # Replace the first extracted line with a call site.
                output.append('\t\t' + conf['new_method_name'] +
                              get_args(listener.used_variables))
            if line_num in listener.remain_lines:
                output.append(line)
            func.append(line)
        elif line_num == listener.method_stop_line:
            # End of the original method: emit it, then the new method.
            output.append(line)
            output.append(
                '\tpublic void ' + conf['new_method_name'] +
                get_args_with_type(listener.used_variables,
                                   listener.variable_info['variables']) +
                '\n')
            output.append('\t{\n')
            output = output + func
            output.append('\t}\n')
        else:
            output.append(line)
    print('--------------------')
    with open(conf['output_file'], 'w', encoding="utf-8") as out:
        out.writelines(output)
def main():
    """Extract subclass ``GodClassextracted`` from ``GodClass``.

    Locates the file owning the source class through the Understand
    database, moves the listed methods/fields into a new subclass, and
    rewrites the original file in place.

    NOTE(review): udb path, class name and member lists are hard coded
    to a local test project.
    """
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    source_class = "GodClass"
    moved_methods = ['method1', 'method3', ]
    moved_fields = ['field1', 'field2', ]

    # Initialize with Understand.
    father_path_file = ""
    file_list_to_be_propagate = set()
    propagate_classes = set()
    db = und.open(udb_path)
    try:
        for cls in db.ents("class"):
            if cls.simplename() == source_class:
                father_path_file = cls.parent().longname()
                for ref in cls.refs("Coupleby"):
                    propagate_classes.add(ref.ent().longname())
    finally:
        # fix: the Understand database handle was previously never closed
        db.close()

    stream = FileStream(father_path_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parse_tree = parser.compilationUnit()
    my_listener = ExtractSubClassRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        new_class=source_class + "extracted",
        moved_fields=moved_fields,
        moved_methods=moved_methods)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    with open(father_path_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
class RemoveFlagArgument:
    """Refactoring API to remove a flag (boolean) argument from a method.

    The flag-controlled branches are extracted so that callers invoke two
    dedicated methods instead of passing a boolean.
    """

    def __init__(self,
                 source_class="Playground",
                 source_method="DeliveryDate",
                 argument_name="b",
                 main_file="playground.java"):
        """Create a remove-flag-argument refactoring.

        Args:
            source_class (str): class name contaminated by the code smell.
            source_method (str): method name contaminated.
            argument_name (str): boolean argument in the method.
            main_file (str): path of the main file containing the class.
        """
        self.source_class = source_class
        self.source_method = source_method
        # NOTE(review): the attribute keeps the historical misspelling
        # ("arguemnt") because external code may already read it.
        self.arguemnt_name = argument_name
        self.main_file = main_file
        self.stream = FileStream(self.main_file, encoding='utf8',
                                 errors='ignore')
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        # fix: removed a parser.getTokenStream() call whose result was
        # discarded
        self.parse_tree = self.parser.compilationUnit()
        self.my_listener = RemoveFlagArgumentListener(
            common_token_stream=self.token_stream,
            source_class=self.source_class,
            source_method=self.source_method,
            argument_name=self.arguemnt_name)

    def do_refactor(self):
        """Remove the flag-argument logic.

        Replaces it with two calls to the new methods generated from the
        logic extracted from the if/else branches, and saves the
        rewritten file in place.
        """
        walker = ParseTreeWalker()
        walker.walk(t=self.parse_tree, listener=self.my_listener)
        with open(self.main_file, 'w') as f:
            f.write(self.my_listener.token_stream_rewriter.getDefaultText())
def propagate_fields(self, usages):
    """Propagate moved-field usages into each file listed in *usages*.

    Every entry carries a ``file_path`` plus listener kwargs; the file is
    re-parsed, rewritten through PropagateFieldUsageListener, saved in
    place, and reformatted.
    """
    for usage in usages:
        target_path = usage.pop('file_path')
        token_stream = CommonTokenStream(
            JavaLexer(FileStream(target_path, encoding='utf-8',
                                 errors='ignore')))
        tree = JavaParserLabeled(token_stream).compilationUnit()
        propagation_listener = PropagateFieldUsageListener(
            common_token_stream=token_stream,
            object_name=self.object_name,
            **usage)
        ParseTreeWalker().walk(t=tree, listener=propagation_listener)
        with open(target_path, mode='w', encoding='utf-8',
                  errors='ignore') as out:
            out.write(
                propagation_listener.token_stream_rewriter.getDefaultText())
        self.reformat(target_path)
def main(udb_path, source_class, method_name, *args, **kwargs):
    """Make *method_name* of *source_class* static.

    Finds the class's source file through the Understand database,
    rewrites it with MakeMethodStaticRefactoringListener, and saves it
    in place.

    Returns:
        bool: True on success, False when the class's file is not found.
    """
    db = und.open(udb_path)
    main_file = None
    for cls in db.ents("Class"):
        if cls.simplename() != source_class:
            continue
        parent = cls.parent()
        if parent is None:
            continue
        candidate = str(parent.longname(True))
        if os.path.isfile(candidate):
            main_file = candidate
            break
    db.close()
    if main_file is None:
        return False

    token_stream = CommonTokenStream(
        JavaLexer(FileStream(main_file, encoding='utf8', errors='ignore')))
    parser = JavaParserLabeled(token_stream)
    tree = parser.compilationUnit()
    listener = MakeMethodStaticRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        method_name=method_name)
    ParseTreeWalker().walk(t=tree, listener=listener)
    with open(main_file, mode='w', encoding='utf8', errors='ignore',
              newline='') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())
    return True
def find_usages(self, new_code):
    """Locate and rewrite usages of the source class inside *new_code*.

    Runs two passes over a freshly parsed tree: FindClassUsagesListener
    collects usage sites, then ChangeClassUsagesListener rewrites them.
    """
    token_stream = CommonTokenStream(JavaLexer(InputStream(new_code)))
    tree = JavaParserLabeled(token_stream).compilationUnit()

    usage_finder = FindClassUsagesListener(
        source_class=self.source_class,
        new_class=self.new_class,
    )
    self.walker.walk(listener=usage_finder, t=tree)

    usage_rewriter = ChangeClassUsagesListener(
        common_token_stream=token_stream,
        source_class=self.source_class,
        new_class=self.new_class,
        moved_fields=self.moved_fields,
        moved_methods=self.moved_methods,
        usages=usage_finder.usages)
    self.walker.walk(listener=usage_rewriter, t=tree)
def get_parse_tree_token_stream(args):
    """Build the ANTLR parse tree and token stream for ``args.file``.

    Args:
        args: parsed CLI arguments; ``args.file`` is the input Java file.

    Returns:
        tuple: ``(parse_tree, token_stream)`` for the compilation unit.
    """
    # Load input source into a stream and lex it.
    stream = FileStream(args.file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    # fix: removed a parser.getTokenStream() call whose result was discarded
    parse_tree = parser.compilationUnit()
    return parse_tree, token_stream
def main(args):
    """Apply the Visitor pattern refactoring to ``args.file``.

    Rewrites superclass 'SC' with subclasses 'CC1'/'CC2'/'CC3' through
    VisitorPatternRefactoringListener, prints the result, writes it to
    the test output file, and reports elapsed wall-clock time.
    """
    begin_time = time()
    stream = FileStream(args.file, encoding='utf8', errors='ignore')
    print('Input stream:')
    print(stream)
    token_stream = CommonTokenStream(JavaLexer(stream))
    parser = JavaParserLabeled(token_stream)
    tree = parser.compilationUnit()
    listener = VisitorPatternRefactoringListener(
        common_token_stream=token_stream,
        SuperClass_identifier='SC',
        SubClass_identifier=['CC1', 'CC2', 'CC3'])
    ParseTreeWalker().walk(t=tree, listener=listener)
    print('Compiler result:')
    print(listener.token_stream_rewriter.getDefaultText())
    with open('../tests/visitor1/VisitorExample0.refactored.java',
              mode='w', newline='') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())
    end_time = time()
    print("time execution : ", end_time - begin_time)
def add_implement_statement_to_class(self):
    """Add an ``implements <interface>`` clause to the class stored at
    ``self.class_path`` and save the rewritten file in place."""
    token_stream = CommonTokenStream(
        JavaLexer(FileStream(self.class_path, encoding='utf8',
                             errors='ignore')))
    parser = JavaParserLabeled(token_stream)
    tree = parser.compilationUnit()
    # The class name is derived from the file name (Java convention).
    class_name = os.path.splitext(os.path.basename(self.class_path))[0]
    implement_listener = AddingImplementStatementToClass(
        common_token_stream=token_stream,
        class_name=class_name,
        interface_package=self.interface_info['package'],
        interface_name=self.interface_info['name'])
    ParseTreeWalker().walk(t=tree, listener=implement_listener)
    with open(self.class_path, encoding='utf8', errors='ignore',
              mode='w', newline='') as f:
        f.write(implement_listener.token_stream_rewriter.getDefaultText())
def main():
    """Replace constructors with factory methods for *target_class*.

    Processes every Java file under the folder (skipping files already
    ending in ``_refactored.java``) and overwrites each file in place.
    """
    folder_path = "/data/Dev/JavaSample/"
    path = get_filenames_in_dir(folder_path)
    print(path)
    target_class = "ReplaceConstructorWithFactoryMethod"
    for file in path:
        if file.endswith('.java') and not file.endswith('_refactored.java'):
            stream = FileStream(file)
            lexer = JavaLexer(stream)
            tokens = CommonTokenStream(lexer)
            parser = JavaParserLabeled(tokens)
            tree = parser.compilationUnit()
            listener = ReplaceConstructorWithFactoryFunctionRefactoringListener(
                common_token_stream=tokens, target_class=target_class)
            walker = ParseTreeWalker()
            walker.walk(listener=listener, t=tree)
            new_code = str(listener.codeRewrite.getDefaultText())
            # fix: previously the file was opened for writing (truncating
            # the source in place) *before* the walk, and never closed;
            # now we only open once the rewrite succeeded and close it
            with open(file, mode='w', newline='') as new_file:
                new_file.write(new_code)
def main(udb_path, target_class, target_methods):
    """Make *target_methods* of *target_class* non-static.

    Locates the class's source file through the Understand database and
    rewrites it in place.

    NOTE(review): if the class is not found, ``main_file`` stays ""
    and FileStream will fail — consider guarding like the sibling APIs.
    """
    main_file = ""
    db = understand.open(udb_path)
    try:
        for cls in db.ents("class"):
            if cls.simplename() == target_class:
                main_file = cls.parent().longname()
    finally:
        # fix: the Understand database handle was previously never closed
        db.close()

    stream = FileStream(main_file, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parse_tree = parser.compilationUnit()
    my_listener = MakeMethodNonStaticRefactoringListener(
        common_token_stream=token_stream,
        target_class=target_class,
        target_methods=target_methods)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
def rename_method(java_file_path, scope_class_name, target_method_name,
                  new_name, reference=None):
    """Main entry point to the rename-method listener and tree walker.

    Args:
        java_file_path (str): path to the test/source file.
        scope_class_name (str): class in which the refactoring is done.
        target_method_name (str): method to be renamed.
        new_name (str): the new name of the refactored method.
        reference (str): tracks method references in the project scope.

    Returns:
        None
    """
    stream = FileStream(java_file_path)
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)
    tree = parser.compilationUnit()
    listener = RenameMethodListener(java_file_path=java_file_path,
                                    common_token_stream=tokens,
                                    scope_class_name=scope_class_name,
                                    target_method_name=target_method_name,
                                    new_name=new_name,
                                    reference=reference)
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    if listener.changed:
        print(java_file_path)
        # fix: the output handle was never closed; behavior (CR stripping)
        # is preserved
        with open(file=java_file_path, mode='w') as new_file:
            new_file.write(
                listener.token_stream_rewriter.getDefaultText().replace(
                    '\r', ''))
def main():
    """Rename field ``CRLF`` of class ``HTTP`` to ``test``.

    Processes every Java file under the benchmark folder, writing outputs
    into ``refactoredFiles/`` (previous outputs are deleted first).
    """
    path_ = "../tests/rename_tests/benchmark"
    package_name_ = "org.json"
    class_identifier_ = "HTTP"
    field_identifier_ = "CRLF"
    field_new_name_ = "test"
    folder_path = os.listdir(path_)
    tests_path = os.listdir(path_ + "/refactoredFiles/")

    # Delete last refactored files.
    for t in tests_path:
        os.remove(os.path.join(path_ + "/refactoredFiles/", t))

    for file_ in folder_path:
        # We have all java files in this folder now.
        if file_.endswith('.java'):
            file_path = path_ + "/" + file_
            file_stream = FileStream(str(file_path))
            file_name = file_.split(".")[0]
            lexer = JavaLexer(file_stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            tree = parser.compilationUnit()
            rename_field_refactoring_listener = RenameFieldRefactoringListener(
                token_stream, package_name_, class_identifier_,
                field_identifier_, field_new_name_)
            walker = ParseTreeWalker()
            walker.walk(rename_field_refactoring_listener, tree)
            # fix: the output handle was previously never closed
            with open(path_ + "/refactoredFiles/" + file_name +
                      "_Refactored.java", 'w', newline='') as refactored:
                refactored.write(rename_field_refactoring_listener.
                                 token_stream_rewriter.getDefaultText())
    print(" %%%%%%%%%%%%%" + " all files finished " + "****************")
def main(args):
    """Apply the Factory Method refactoring to ``args.file``.

    Uses creator 'FactoryMethod' with products 'JpegReader'/'GifReader'
    and writes the result to ``FactoryExample.refactored.java``.
    """
    stream = FileStream(args.file, encoding='utf8')
    token_stream = CommonTokenStream(JavaLexer(stream))
    parser = JavaParserLabeled(token_stream)
    tree = parser.compilationUnit()
    listener = FactoryMethodRefactoringListener(
        common_token_stream=token_stream,
        creator_identifier='FactoryMethod',
        products_identifier=['JpegReader', 'GifReader'])
    ParseTreeWalker().walk(t=tree, listener=listener)
    with open('FactoryExample.refactored.java', mode='w', newline='') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())
def main(args):
    """Encapsulate field 'f' in the input Java source.

    Reads ``args.file``, applies EncapsulateFiledRefactoringListener, and
    writes the refactored compilation unit to the encapsulate-field test
    output path.
    """
    stream = FileStream(args.file, encoding='utf8')
    common_token_stream = CommonTokenStream(JavaLexer(stream))
    parser = JavaParserLabeled(common_token_stream)
    tree = parser.compilationUnit()
    listener = EncapsulateFiledRefactoringListener(
        common_token_stream=common_token_stream, field_identifier='f')
    ParseTreeWalker().walk(t=tree, listener=listener)
    with open('../../tests/encapsulate_field_tests/input.refactored.java',
              mode='w', newline='') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())
def main(args):
    """Apply the Strategy pattern refactoring to method 'execute' in
    ``args.file``, print the result, save it, and report elapsed time."""
    begin_time = time()
    stream = FileStream(args.file, encoding='utf8')
    print('Input stream:')
    print(stream)
    token_stream = CommonTokenStream(JavaLexer(stream))
    parser = JavaParserLabeled(token_stream)
    tree = parser.compilationUnit()
    listener = StrategyPatternRefactoringListener(
        common_token_stream=token_stream, method_identifier='execute')
    ParseTreeWalker().walk(t=tree, listener=listener)
    print('Compiler result:')
    print(listener.token_stream_rewriter.getDefaultText())
    with open('StrategyExample0.refactored.java', mode='w',
              newline='') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())
    end_time = time()
    print("execute time : ", end_time - begin_time)
def main():
    """Replace constructors with factory functions for class ``Employee``.

    Processes every Java test file in the folder (skipping files already
    ending in ``_refactored.java``) and writes each result to
    ``<name>_refactored.java``.
    """
    folder_path = "..\\tests\\replace_constructor_with_factory_function_tests"
    path = os.listdir(folder_path)
    target_class = "Employee"
    for file in path:
        if file.endswith('.java') and not file.endswith('_refactored.java'):
            each = folder_path + "\\" + file
            stream = FileStream(str(each))
            lexer = JavaLexer(stream)
            tokens = CommonTokenStream(lexer)
            parser = JavaParserLabeled(tokens)
            tree = parser.compilationUnit()
            listener = ReplaceConstructorWithFactoryFunctionRefactoringListener(
                common_token_stream=tokens, target_class=target_class)
            walker = ParseTreeWalker()
            walker.walk(listener=listener, t=tree)
            new_code = str(listener.codeRewrite.getDefaultText())
            # fix: the output handle was previously never closed
            with open(os.path.join(folder_path, file + "_refactored.java"),
                      mode='w', newline='') as new_file:
                new_file.write(new_code)
def main(udb_path, source_package, source_class, method_name,
         target_classes: list, *args, **kwargs):
    """The main API for the push-down method refactoring operation.

    Moves *method_name* from *source_class* down into the single class
    named in *target_classes*, then patches every call site. Returns
    False (after closing the database) if any pre-condition fails:
    exactly one target/child class, no duplicate method in the target,
    and no non-public internal dependencies.
    """
    target_package = source_package
    source_method = method_name
    main_file = None
    source_method_entity = None
    is_static = False
    propagation_files = []
    propagation_classes = []
    propagation_lines = []
    children_classes = []
    children_files = []

    # Initialize with Understand: find the source method entity, the
    # child classes that extend its parent, and every call site.
    db = und.open(udb_path)
    methods = db.ents("Java Method")
    for mth in methods:
        if mth.longname(
        ) == source_package + "." + source_class + "." + source_method:
            source_method_entity = mth
            for child_ref in mth.parent().refs("Extendby"):
                child_ref = child_ref.ent()
                if child_ref.simplename() in target_classes:
                    children_classes.append(child_ref.simplename())
                    children_files.append(child_ref.parent().longname())
            is_static = mth.kind().check("static")
            main_file = mth.parent().parent().longname()
            for ref in mth.refs("Callby"):
                propagation_files.append(
                    ref.ent().parent().parent().longname())
                propagation_classes.append(ref.ent().parent().simplename())
                propagation_lines.append(ref.line())

    # Check pre-conditions: exactly one target class / child class / file.
    if not len(target_classes) == 1:
        logger.error(f"len(target_classes) is not 1.")
        db.close()
        return False
    if not len(children_classes) == 1:
        logger.error(f"len(children_classes) is not 1.")
        db.close()
        return False
    if not len(children_files) == 1:
        logger.error(f"len(children_files) is not 1.")
        db.close()
        return False

    # Reject if the target class already declares an identical method.
    for mth in methods:
        if mth.simplename() == source_method:
            if mth.parent().simplename() in target_classes:
                if mth.type() == source_method_entity.type():
                    if mth.kind() == source_method_entity.kind():
                        if mth.parameters() == source_method_entity.parameters(
                        ):
                            logger.error("Duplicated method")
                            db.close()
                            return False

    # Reject if the method uses/calls anything non-public.
    for ref in source_method_entity.refs("use, call"):
        ref_ent = ref.ent()
        is_public = ref_ent.kind().check("public")
        if not is_public:
            logger.error("Has internal dependencies.")
            db.close()
            return False

    # Get the method's source text before closing the database.
    method_text = source_method_entity.contents()
    db.close()

    # Delete the source method from the parent class's file.
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = DeleteSourceListener(common_token_stream=token_stream,
                                       source_method=source_method)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    with open(main_file, mode='w', encoding='utf-8', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())

    # Do the push down: insert the method text into each child class.
    for child_file, child_class in zip(children_files, children_classes):
        stream = FileStream(child_file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = PushDownMethodRefactoringListener(
            common_token_stream=token_stream,
            source_class=child_class,
            source_method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        with open(child_file, mode='w', encoding='utf8', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())

    # Propagation: patch every call site, choosing the static or
    # non-static listener depending on the method's kind.
    for file, _class, line in zip(propagation_files, propagation_classes,
                                  propagation_lines):
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        if is_static:
            my_listener = PropagationStaticListener(
                common_token_stream=token_stream,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=_class,
                method_name=source_method,
                ref_line=line,
                target_package=target_package)
        else:
            my_listener = PropagationNonStaticListener(
                common_token_stream=token_stream,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=_class,
                method_name=source_method,
                ref_line=line,
                target_package=target_package)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        with open(file, mode='w', encoding='utf8', errors='ignore',
                  newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())
    return True
def main():
    """Run the remove-dead-code refactoring over every ``.java`` file in
    the test folder, writing each result next to the input as
    ``<name>.java_Refactored.java``.

    Each parsed file is paired positionally with one entry of
    ``identifiers`` (the dead classes/methods/fields/variables/parameters
    to remove); files beyond the number of entries are parsed but not
    rewritten.
    """
    # Renamed from `Path` — the original name shadowed `pathlib.Path`.
    tests_dir = "../tests/remove_dead_code"
    folder_contents = os.listdir(tests_dir)
    identifiers = [{
        "Classes": ["Airplane"],
        "Methods": ["Car/Fly", "Engine/main2"],
        "Fields": ["Car/noway", "Engine/model"],
        "Variables": [],
        "Parameters": []
    }, {
        "Classes": [],
        "Methods": [],
        "Fields": [],
        "Variables": [
            "Car/main/dead", "Car/Run/number", "Engine/SetName/what",
            "Engine/main2/variable", "Engine/main2/dead"
        ],
        "Parameters": ["Car/Drive/wheels", "Engine/main/k", "Engine/SetName/last"]
    }, {
        "Classes": ["Airplane"],
        "Methods": ["Car/Fly", "Engine/main2"],
        "Fields": ["Car/noway", "Engine/model"],
        "Variables": [
            "Car/main/dead", "Car/Run/number", "Engine/SetName/what",
            "Engine/main2/variable", "Engine/main2/dead"
        ],
        "Parameters": ["Car/Drive/wheels", "Engine/main/k", "Engine/SetName/last"]
    }]
    i = 0
    for file_name in folder_contents:
        # Only Java sources in the folder are processed.
        if not file_name.endswith('.java'):
            continue
        # Fix: was `Path + "\\" + File`, which only works on Windows.
        each_file_path = os.path.join(tests_dir, file_name)
        # Standard ANTLR pipeline: stream -> lexer -> tokens -> parser -> tree.
        stream = FileStream(str(each_file_path))
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        tree = parser.compilationUnit()
        if i < len(identifiers):
            listener = RemoveDeadCodeClass(token_stream, identifiers[i])
            walker = ParseTreeWalker()
            walker.walk(listener, tree)
            new_code = str(listener.CodeRewrite.getDefaultText())
            # Fix: the output file was opened without a context manager and
            # never closed (leaked handle); also write UTF-8 explicitly
            # instead of relying on the platform default encoding.
            out_path = os.path.join(tests_dir, file_name + "_Refactored.java")
            with open(out_path, mode='w', newline='', encoding='utf8') as refactored:
                refactored.write(new_code)
            i += 1
            # Continuation of a rewriter call begun in an earlier chunk:
            # inserts "<object>." into the default rewrite program.
            index=ctx.start.tokenIndex,
            text=self.object + '.',
            program_name=self.token_stream_rewriter.DEFAULT_PROGRAM_NAME)


# Script entry point: make the `Shape` class of a fixed test project
# concrete, locating its defining file via a SciTools Understand database.
if __name__ == '__main__':
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"
    source_class = "Shape"
    # initialize with understand
    main_file = ""
    db = und.open(udb_path)
    # Find the file that defines `source_class`; if several classes match,
    # the last one wins. NOTE(review): if no class matches, `main_file`
    # stays "" and FileStream below will fail — confirm intended.
    for cls in db.ents("class"):
        if cls.simplename() == source_class:
            main_file = cls.parent().longname()
    # Standard ANTLR pipeline: stream -> lexer -> tokens -> parser -> tree.
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = MakeConcreteClassRefactoringListener(
        common_token_stream=token_stream, class_name=source_class)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    # Overwrite the source file with the rewritten token stream.
    with open(main_file, mode='w', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
    db.close()
class ExtractClassAPI:
    """Extract Class refactoring for a single Java source file.

    Moves ``moved_fields`` and ``moved_methods`` out of ``source_class``
    into a newly created ``new_class`` (written to ``new_file_path``),
    then propagates the change to every usage site found through a
    SciTools Understand database at ``udb_path``.
    """

    def __init__(self, udb_path, file_path, source_class, new_class,
                 moved_fields, moved_methods, new_file_path=None):
        self.file_path = file_path
        self.udb_path = udb_path
        self.new_file_path = new_file_path
        self.source_class = source_class
        self.new_class = new_class
        self.moved_fields = moved_fields
        self.moved_methods = moved_methods
        # Parse the source file once; the same token stream and tree are
        # reused by every walk below.
        self.stream = FileStream(self.file_path, encoding="utf8")
        self.lexer = JavaLexer(self.stream)
        self.token_stream = CommonTokenStream(self.lexer)
        self.parser = JavaParserLabeled(self.token_stream)
        self.tree = self.parser.compilationUnit()
        self.walker = ParseTreeWalker()
        self.method_usage_map = {}
        self.pass_this = False
        self.TAB = "\t"
        self.object_name = ""
        # Fix: `checked` was previously assigned only inside
        # check_dependency_graph(), so reading it before that call raised
        # AttributeError. Give it a safe default here.
        self.checked = False

    def check_dependency_graph(self):
        """Set ``self.checked`` to True when the moved members form one of
        the connected components of the class dependency graph, or when
        the graph has no components at all."""
        listener = DependencyPreConditionListener(
            common_token_stream=self.token_stream,
            class_identifier=self.source_class)
        self.walker.walk(listener=listener, t=self.tree)
        component = sorted(self.moved_methods + self.moved_fields)
        if component in sorted(listener.connected_components):
            self.checked = True
        if len(listener.connected_components) == 0:
            self.checked = True

    def get_source_class_map(self, db):
        """Populate ``self.method_usage_map``: for each method defined in
        the source class, the set of simple names of entities it sets,
        uses, modifies, or calls.

        Raises AssertionError when the source class cannot be found in
        ``self.file_path`` via the Understand database ``db``.
        """
        class_ents = db.lookup(self.source_class, "Class")
        class_ent = None
        for ent in class_ents:
            # Understand may return same-named classes from other files;
            # keep only the one defined in our target file.
            if Path(ent.parent().longname()) == Path(self.file_path):
                class_ent = ent
                break
        assert class_ent is not None
        for ref in class_ent.refs("Define", "Method"):
            method_ent = ref.ent()
            self.method_usage_map[method_ent.simplename()] = set()
            for use in method_ent.refs("SetBy UseBy ModifyBy, Call",
                                       "Variable ~Unknown, Method ~Unknown"):
                self.method_usage_map[method_ent.simplename()].add(
                    use.ent().simplename())

    def propagate_fields(self, usages):
        """Rewrite each field-usage site so moved fields are accessed
        through the extracted object.

        ``usages`` is a list of dicts, each with a ``file_path`` key
        (popped here) plus the remaining kwargs forwarded to
        PropagateFieldUsageListener.
        """
        for usage in usages:
            file_path = usage.pop('file_path')
            stream = FileStream(file_path, encoding='utf8')
            lexer = JavaLexer(stream)
            token_stream = CommonTokenStream(lexer)
            parser = JavaParserLabeled(token_stream)
            parse_tree = parser.compilationUnit()
            my_listener = PropagateFieldUsageListener(
                common_token_stream=token_stream,
                object_name=self.object_name,
                **usage)
            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)
            # print(my_listener.token_stream_rewriter.getDefaultText())
            # Fix: write UTF-8 explicitly — the file was read as UTF-8, and
            # writing with the platform default encoding can fail on Windows.
            with open(file_path, 'w', encoding='utf8') as f:
                f.write(my_listener.token_stream_rewriter.getDefaultText())
            self.reformat(file_path)

    @staticmethod
    def reformat(file_path: str):
        """Hook for running an external Java formatter; currently a no-op."""
        # formatter = os.path.abspath("../assets/formatter/google-java-format-1.10.0-all-deps.jar")
        # subprocess.call(["java", "-jar", formatter, "--replace", file_path])
        pass

    def do_refactor(self):
        """Perform the extraction: rewrite the source class, generate the
        new class file, then propagate field usages and reformat."""
        db = und.open(self.udb_path)
        self.get_source_class_map(db=db)
        listener = ExtractClassRefactoringListener(
            common_token_stream=self.token_stream,
            new_class=self.new_class,
            source_class=self.source_class,
            moved_fields=self.moved_fields,
            moved_methods=self.moved_methods,
            method_map=self.method_usage_map)
        self.object_name = listener.object_name
        self.walker.walk(listener=listener, t=self.tree)
        # Find field usages in OTHER files (usages in the source file
        # itself are already handled by the listener above).
        field_usages = []
        for field in self.moved_fields:
            for ent in db.lookup(f"{self.source_class}.{field}"):
                # print(ent.name(), " [", ent.kindname(), "]", sep="", end="\n")
                for ref in ent.refs("useBy, setBy, modifyBy"):
                    if Path(ref.file().longname()) == Path(self.file_path):
                        continue
                    field_usage = {
                        'field_name': field,
                        'file_path': ref.file().longname()
                    }
                    if field_usage not in field_usages:
                        field_usages.append(field_usage)
        # Parse the generated new-class source (held in `listener.code`)
        # and fix up method bodies that reference moved members.
        stream = InputStream(listener.code)
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = NewClassPropagation(common_token_stream=token_stream,
                                          method_map=self.method_usage_map,
                                          source_class=self.source_class,
                                          moved_fields=self.moved_fields)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # Write changes. Fix: explicit UTF-8 on both writes, matching the
        # UTF-8 read in __init__.
        with open(self.file_path, 'w', encoding='utf8') as f:
            f.write(listener.token_stream_rewriter.getDefaultText())
        with open(self.new_file_path, 'w', encoding='utf8') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())
        # Propagate and reformat
        self.propagate_fields(field_usages)
        self.reformat(self.file_path)
        self.reformat(self.new_file_path)
        db.close()
def _parse_java_file(path):
    """Parse a Java source file; return (token_stream, parse_tree)."""
    stream = FileStream(path, encoding='utf8')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    return token_stream, parser.compilationUnit()


def _rewrite_file(path, listener):
    """Overwrite *path* with *listener*'s rewritten token stream."""
    # Explicit UTF-8 to match the UTF-8 reads in _parse_java_file.
    with open(path, mode='w', newline='', encoding='utf8') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())


def main():
    """Run the Extract Subclass refactoring on a sample project.

    Builds the parse tree of the source file, walks the refactoring
    listener to split `source_class` into `<source_class>extracted`,
    rewrites the file, then walks usage-finding and propagation listeners
    over every affected file.

    (Earlier revisions carried several alternative, commented-out
    benchmark configurations — GanttProject, Xerces, Chess — and an
    unused `udb_path`; they have been removed.)
    """
    source_class = "GodClass"
    moved_methods = ['method1', 'method3']
    moved_fields = ['field1', 'field2']
    father_path_file = "/data/Dev/JavaSample/src/GodClass.java"
    father_path_directory = "/data/Dev/JavaSample/src"
    path_to_refactor = "/data/Dev/JavaSample/src"
    new_class_file = "/data/Dev/JavaSample/src/GodSubClass.java"

    new_class_name = source_class + "extracted"

    # Step 1: extract the subclass out of the father file.
    token_stream, parse_tree = _parse_java_file(father_path_file)
    my_listener = ExtractSubClassRefactoringListener(
        common_token_stream=token_stream,
        source_class=source_class,
        new_class=new_class_name,
        moved_fields=moved_fields,
        moved_methods=moved_methods,
        output_path=father_path_directory)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    _rewrite_file(father_path_file, my_listener)

    extractJavaFilesAndProcess(path_to_refactor, father_path_file,
                               new_class_file)

    # Step 2: find usages and propagate the change in every other file.
    for file in files_to_refactor:
        token_stream, parse_tree = _parse_java_file(file)
        my_listener = FindUsagesListener(
            common_token_stream=token_stream,
            source_class=source_class,
            new_class=new_class_name,
            moved_fields=moved_fields,
            moved_methods=moved_methods,
            output_path=father_path_directory)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        tmp_aul = my_listener.aul
        _rewrite_file(file, my_listener)
        # after find usages
        try:
            token_stream, parse_tree = _parse_java_file(file)
            my_listener = PropagationListener(
                common_token_stream=token_stream,
                source_class=source_class,
                new_class=new_class_name,
                moved_fields=moved_fields,
                moved_methods=moved_methods,
                output_path=father_path_directory,
                aul=tmp_aul)
            walker = ParseTreeWalker()
            walker.walk(t=parse_tree, listener=my_listener)
            _rewrite_file(file, my_listener)
        # Fix: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception. NOTE(review): any
        # failure here is still reported as "not utf8" — confirm intended.
        except Exception:
            print("not utf8")
def main(udb, child, parent):
    """Collapse-hierarchy refactoring: fold the `child` class into its
    `parent`, then propagate the rename to coupled classes.

    NOTE(review): the Understand-database initialization below is
    commented out, so `child_path_file` / `father_path_file` remain empty
    strings and the FileStream calls will fail, and the `udb` parameter is
    unused — confirm before enabling.
    """
    # initialize with understand
    udb_path = "/home/ali/Desktop/code/TestProject/TestProject.udb"  # leftover, currently unused
    child_path_file = ""
    father_path_file = ""
    file_list_to_be_propagate = set()
    propagate_classes = set()
    # db = und.open(udb)
    # for cls in db.ents("class"):
    #     if cls.simplename() == child:
    #         child_path_file = cls.parent().longname()
    #         for ref in cls.refs("Coupleby"):
    #             propagate_classes.add(ref.ent().longname())
    #             file_list_to_be_propagate.add(ref.ent().parent().longname())
    #     if cls.simplename() == parent:
    #         father_path_file = cls.parent().longname()
    file_list_to_be_propagate = list(file_list_to_be_propagate)
    propagate_classes = list(propagate_classes)

    # Parse the child's file and collect its field and method texts.
    stream = FileStream(child_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener_field_text = CollapseHierarchyRefactoringGetFieldTextListener(
        common_token_stream=token_stream, child_class=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_field_text)
    field_code = my_listener_field_text.fieldcode
    my_listener_method_text = CollapseHierarchyRefactoringGetMethodTextListener(
        common_token_stream=token_stream, child_class=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_method_text)
    methods_code = my_listener_method_text.methodcode

    # Remove child class
    my_listener_remove_child_class = RemoveClassRefactoringListener(
        common_token_stream=token_stream, class_name=child)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_remove_child_class)
    # Fix: write UTF-8 explicitly to match the UTF-8 read above; the
    # platform default encoding can fail on non-ASCII sources.
    with open(child_path_file, mode='w', encoding='utf8', newline='') as f:
        f.write(my_listener_remove_child_class.token_stream_rewriter.getDefaultText())

    # Refactor: insert the collected members into the parent class.
    stream = FileStream(father_path_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener_refactor_action = CollapseHierarchyRefactoringListener(
        common_token_stream=token_stream,
        parent_class=parent,
        child_class=child,
        field_text=field_code,
        method_text=methods_code)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener_refactor_action)
    with open(father_path_file, mode='w', encoding='utf8', newline='') as f:
        f.write(
            my_listener_refactor_action.token_stream_rewriter.getDefaultText())

    # Propagate the child->parent rename into every coupled file.
    for file in file_list_to_be_propagate:
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        # NOTE(review): a CommonTokenStream is passed as
        # `token_stream_rewriter` here — confirm the listener expects that.
        my_listener_propagate = PropagationCollapseHierarchyListener(
            token_stream_rewriter=token_stream,
            old_class_name=child,
            new_class_name=parent,
            propagated_class_name=propagate_classes)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)
        with open(file, mode='w', encoding='utf8', newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())