def do_refactoring(self):
    """Log this refactoring's name and parameters, then execute it via ``self.main``.

    Any exception raised by the operation is caught and logged rather than
    propagated to the caller.
    """
    logger.info(f"Running {self.name}")
    logger.info(f"Parameters {self.params}")
    try:
        # self.params is a dict of keyword arguments for the operation's main API.
        self.main(**self.params)
    except Exception as e:
        logger.error(f"Error in executing refactoring:\n {e}")
def do_refactoring(self):
    """Check preconditions and apply the refactoring operation to source code."""
    # Trace which operator is about to run, and with which arguments.
    logger.info(f"Running {self.name}")
    logger.info(f"Parameters {self.params}")
    try:
        self.main(**self.params)
    except Exception as e:
        # Failures are logged rather than re-raised.
        logger.error(f"Error in executing refactoring:\n {e}")
def main(project_dir, source_package, source_class, field_name, target_classes: list, *args, **kwargs):
    """Push a field from a superclass down into the given target (child) classes.

    Returns True on success, False when the underlying operation reports failure.
    """
    refactoring = PushDownField(
        symbol_table.get_filenames_in_dir(project_dir),
        package_name=source_package,
        superclass_name=source_class,
        field_name=field_name,
        class_names=target_classes,
    )
    if refactoring.do_refactor():
        return True
    logger.error("Cannot push-down field")
    return False
def main(udb_path, source_package, source_class, source_method, *args, **kwargs):
    """Increase the visibility of ``source_package.source_class.source_method``.

    Looks the method up in the Understand database, then rewrites its
    declaring file via ``IncreaseMethodVisibilityListener``.
    Returns True on success, False when a precondition fails.
    """
    db = und.open(udb_path)
    candidates = db.lookup(f"{source_package}.{source_class}.{source_method}", "Method")
    # Covers both a None result and an empty list.
    if not candidates:
        logger.error("Invalid inputs.")
        db.close()
        return False
    method_entity = candidates[0]
    if method_entity.simplename() != source_method:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition (disabled):
    # if not method_entity.kind().check("Private"):
    #     logger.error("Method is not private.")
    #     db.close()
    #     return False
    # Climb to the outermost ancestor entity, i.e. the file declaring the method.
    scope = method_entity.parent()
    while scope.parent() is not None:
        scope = scope.parent()
    declaring_file = scope.longname()
    db.close()
    parse_and_walk(
        file_path=declaring_file,
        listener_class=IncreaseMethodVisibilityListener,
        has_write=True,
        source_class=source_class,
        source_method=source_method
    )
    return True
def main(udb_path, file_path, source_class, moved_fields, moved_methods, *args, **kwargs):
    """Extract a new class from ``source_class``.

    Moves ``moved_fields`` and ``moved_methods`` into a new class named
    ``<source_class>Extracted``, written next to the original source file.

    Returns:
        bool: True when the refactoring was applied, False when a
        precondition fails (nested source class, target file already
        present, or nothing to extract).
    """
    new_class = f"{source_class}Extracted"
    new_file_path = os.path.join(Path(file_path).parent, f"{new_class}.java")
    # file_path must be an existing file; otherwise the class is nested elsewhere.
    if not os.path.exists(file_path):
        logger.error(f'The source class "{source_class}" is nested in {file_path}')
        return False
    # Never overwrite an existing file for the extracted class.
    if os.path.exists(new_file_path):
        logger.error(f'The new class "{new_file_path}" already exist.')
        return False
    eca = ExtractClassAPI(
        udb_path=udb_path,
        file_path=file_path,
        source_class=source_class,
        new_class=new_class,
        moved_fields=moved_fields,
        moved_methods=moved_methods,
        new_file_path=new_file_path
    )
    eca.get_source_class_map()
    # An empty usage map means there is nothing to extract.
    if not eca.method_usage_map:
        logger.error(f'The method_usage_map is empty: {len(eca.method_usage_map)}')
        return False
    return eca.do_refactor()
def main(udb_path, source_package, source_class, source_field, *args, **kwargs):
    """Decrease the visibility of a field to private.

    Verifies that ``source_package.source_class.source_field`` exists and is
    only referenced from inside the source class, then rewrites the declaring
    file via ``DecreaseFieldVisibilityListener``.

    Returns:
        bool: True on success, False when a precondition fails.
    """
    db = und.open(udb_path)
    field_ent = db.lookup(f"{source_package}.{source_class}.{source_field}", "Variable")
    # db.lookup may return None as well as an empty list (the sibling
    # increase-visibility API guards both); check both before indexing.
    if field_ent is None or len(field_ent) == 0:
        logger.error("Invalid inputs.")
        db.close()
        return False
    field_ent = field_ent[0]
    if field_ent.simplename() != source_field:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition
    # if not field_ent.kind().check("Public"):
    #     logger.error("Field is not public.")
    #     db.close()
    #     return False
    # The field must not be read/written outside the source class, otherwise
    # making it private would break those references.
    for ref in field_ent.refs("Useby,Setby"):
        ent = ref.ent()
        if f"{source_package}.{source_class}" not in ent.longname():
            logger.debug(f"{source_package}.{source_class} not in {ent.longname()}")
            logger.error("Field cannot set to private.")
            db.close()
            return False
    # Climb to the outermost ancestor entity: the file declaring the field.
    parent = field_ent.parent()
    while parent.parent() is not None:
        parent = parent.parent()
    main_file = parent.longname()
    db.close()
    parse_and_walk(
        file_path=main_file,
        listener_class=DecreaseFieldVisibilityListener,
        has_write=True,
        source_class=source_class,
        source_field=source_field
    )
    return True
def main(udb_path, source_package, source_class, source_method, *args, **kwargs):
    """Decrease the visibility of a method to private.

    Verifies that ``source_package.source_class.source_method`` exists and is
    only called from inside the source class, then rewrites the declaring file
    via ``DecreaseMethodVisibilityListener``.

    Returns:
        bool: True on success, False when a precondition fails.
    """
    db = und.open(udb_path)
    method_ent = db.lookup(f"{source_package}.{source_class}.{source_method}", "Method")
    # db.lookup may return None as well as an empty list (the sibling
    # increase-visibility API guards both); check both before indexing.
    if method_ent is None or len(method_ent) == 0:
        logger.error("Invalid inputs.")
        db.close()
        return False
    method_ent = method_ent[0]
    if method_ent.simplename() != source_method:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition
    # if not method_ent.kind().check("Public"):
    #     logger.error("Method is not public.")
    #     db.close()
    #     return False
    # The method must not be called from outside the source class, otherwise
    # making it private would break those call sites.
    for ent in method_ent.ents("CallBy"):
        if f"{source_package}.{source_class}" not in ent.longname():
            logger.error("Method cannot set to private.")
            db.close()
            return False
    # Climb to the outermost ancestor entity: the file declaring the method.
    parent = method_ent.parent()
    while parent.parent() is not None:
        parent = parent.parent()
    main_file = parent.longname()
    db.close()
    parse_and_walk(file_path=main_file,
                   listener_class=DecreaseMethodVisibilityListener,
                   has_write=True,
                   source_class=source_class,
                   source_method=source_method)
    return True
def main(source_class: str, source_package: str, target_class: str, target_package: str, method_name: str, udb_path: str, *args, **kwargs):
    """Move a method from ``source_class`` to ``target_class`` (move-method API).

    Checks preconditions against the Understand database, rewrites every call
    site to go through a generated instance named ``<target>bycodart``, then
    cuts the method from the source file and pastes it into the target file.

    Returns:
        bool: True on success, False when any precondition fails.
    """
    # An import of the target class is only needed when packages differ.
    import_statement = None
    if source_package != target_package:
        import_statement = f"\nimport {target_package}.{target_class};"
    instance_name = target_class.lower() + "ByCodArt"
    db = und.open(udb_path)
    method_map, class_ent = get_source_class_map(db, source_class)
    if class_ent is None:
        logger.error("Class entity is None")
        return False
    # Strong overlay precondition
    # if class_ent.refs("Extend ~Implicit, ExtendBy, Implement"):
    #     logger.error("Class is in inheritance or implements an interface.")
    #     db.close()
    #     return False
    # Check if method is static
    method_ent = db.lookup(f"{source_package}.{source_class}.{method_name}", "Method")
    if len(method_ent) >= 1:
        method_ent = method_ent[0]
    else:
        logger.error("Entity not found.")
        db.close()
        return False
    # Reject ambiguous lookups (overloads / shadowed names).
    # NOTE(review): message typo "duo" -> "due"; left as-is (runtime string).
    if method_ent.simplename() != method_name:
        logger.error("Can not move method duo to duplicated entities.")
        logger.info(f"{method_ent}, {method_ent.kindname()}")
        db.close()
        return False
    if source_package == target_package and source_class == target_class:
        logger.error("Can not move to self.")
        db.close()
        return False
    is_static = STATIC in method_ent.kindname()
    # Find usages: map file path -> list of call-site line numbers.
    usages = {}
    for ref in method_ent.refs("Callby"):
        file = ref.file().longname()
        if file in usages:
            usages[file].append(ref.line())
        else:
            usages[file] = [ref.line(), ]
    try:
        src_class_file = db.lookup(f"{source_package}.{source_class}.java", "File")[0].longname()
        target_class_file = db.lookup(f"{target_package}.{target_class}.java", "File")[0].longname()
    except IndexError:
        # No file entity matches the expected <Class>.java name.
        logger.error("This is a nested method.")
        logger.info(f"{source_package}.{source_class}.java")
        logger.info(f"{target_package}.{target_class}.java")
        db.close()
        return False
    db.close()
    # Check if there is a cycle between the two classes before moving.
    listener = parse_and_walk(
        file_path=target_class_file,
        listener_class=CheckCycleListener,
        class_name=source_class
    )
    if not listener.is_valid:
        logger.error(f"Can not move method because there is a cycle between {source_class}, {target_class}")
        # db.close()
        return False
    # Propagate changes: rewrite each call site to call through instance_name.
    for file in usages.keys():
        public_class_name = os.path.basename(file).split(".")[0]
        is_in_target_class = public_class_name == target_class
        parse_and_walk(
            file_path=file,
            listener_class=PropagateListener,
            has_write=True,
            method_name=method_name,
            new_name=f"{instance_name}.{method_name}",
            lines=usages[file],
            is_in_target_class=is_in_target_class,
            method_map=method_map,
        )
    # exit(-1)
    # Do the cut and paste!
    # Cut: remove the method from the source class, capturing its text.
    listener = parse_and_walk(
        file_path=src_class_file,
        listener_class=CutMethodListener,
        has_write=True,
        class_name=target_class,
        instance_name=instance_name,
        method_name=method_name,
        is_static=is_static,
        import_statement=import_statement,
    )
    method_text = listener.method_text
    # Paste: insert the captured method text into the target class.
    listener = parse_and_walk(
        file_path=target_class_file,
        listener_class=PasteMethodListener,
        has_write=True,
        method_text=method_text,
        source_class=source_class,
        method_map=method_map,
        imports=listener.imports,
    )
    # Post-Paste: Reference Injection
    parse_and_walk(
        file_path=target_class_file,
        listener_class=ReferenceInjectorAndConstructorListener,
        has_write=True,
        method_text=method_text,
        source_class=source_class,
        method_map=method_map,
        imports=None,
        has_empty_cons=listener.has_empty_cons,
    )
    # db.close()
    return True
def exitFieldDeclaration(self, ctx: JavaParserLabeled.FieldDeclarationContext):
    """Make the target field private and generate accessor/mutator methods.

    When the visited field declaration matches ``self.field_identifier``
    (and we are inside the selected class and package), its modifier is
    rewritten to ``private`` and a getter/setter pair is appended to the
    class body unless equivalently-named methods already exist.
    """
    if self.in_source_class and self.in_selected_package:
        if ctx.variableDeclarators().variableDeclarator(0).variableDeclaratorId().getText() == self.field_identifier:
            # Hoist the repeated deep context lookup (pure query, no side effects).
            modifier = ctx.parentCtx.parentCtx.modifier(0)
            if not modifier:
                # No modifier at all: insert "private " before the type.
                self.token_stream_rewriter.insertBeforeIndex(
                    index=ctx.typeType().stop.tokenIndex,
                    text='private ')
            elif modifier.getText() == 'public':
                # Replace the existing "public" modifier with "private".
                self.token_stream_rewriter.replaceRange(
                    from_idx=modifier.start.tokenIndex,
                    to_idx=modifier.stop.tokenIndex,
                    text='private')
            else:
                # Already non-public (private/protected/etc.): nothing to do.
                return
            # Scan sibling class members for an already-existing getter/setter.
            for c in ctx.parentCtx.parentCtx.parentCtx.classBodyDeclaration():
                try:
                    # Non-method members raise AttributeError (memberDeclaration()
                    # or methodDeclaration() returns None for them).
                    name = c.memberDeclaration().methodDeclaration().IDENTIFIER().getText()
                except AttributeError:
                    logger.error("not method !!!")
                    continue
                logger.debug('method name: ' + name)
                if name == 'get' + str.capitalize(self.field_identifier):
                    self.getter_exist = True
                if name == 'set' + str.capitalize(self.field_identifier):
                    self.setter_exist = True
            logger.debug("setter find: " + str(self.setter_exist))
            logger.debug("getter find: " + str(self.getter_exist))
            # generate accessor and mutator methods
            new_code = ''
            if not self.getter_exist:
                # Accessor body
                new_code = '\n\t// new getter method\n\t'
                new_code += 'public ' + ctx.typeType().getText() + \
                    ' get' + str.capitalize(self.field_identifier)
                new_code += '() { \n\t\treturn this.' + self.field_identifier \
                    + ';' + '\n\t}\n'
            if not self.setter_exist:
                # Mutator body
                new_code += '\n\t// new setter method\n\t'
                new_code += 'public void set' + str.capitalize(self.field_identifier)
                new_code += '(' + ctx.typeType().getText() + ' ' \
                    + self.field_identifier + ') { \n\t\t'
                new_code += 'this.' + self.field_identifier + ' = ' \
                    + self.field_identifier + ';' + '\n\t}\n'
            # Append the generated methods right after the field declaration.
            self.token_stream_rewriter.insertAfter(ctx.stop.tokenIndex, new_code)
def main(udb_path, source_package, target_class, class_names: list, *args, **kwargs):
    """Pull up common constructor statements from child classes into ``target_class``.

    Groups the children's constructors by parameter signature, finds field
    assignments shared by more than one constructor in a group, then rewrites
    each involved file via ``PullUpConstructorListener``.

    Returns:
        bool: True on success, False when a precondition fails.
    """
    # Need at least two children to have anything in common.
    if len(class_names) < 2:
        logger.error("class_names is empty.")
        return False
    db = und.open(udb_path)
    parent_cons = []
    # Check children
    parent = db.lookup(f"{target_class}", "Public Class")
    if len(parent) != 1:
        logger.error("Count of target class is not 1.")
        db.close()
        return False
    parent = parent[0]
    parent_file = db.lookup(f"{target_class}.java", "File")[0].longname()
    # Collect the parent's existing constructor signatures.
    for i in parent.ents("Define", "Constructor"):
        parent_cons.append(i.parameters())
    # Find constructor entities grouped by signature: params string -> [entities]
    constructors = {}
    for child in class_names:
        cons = db.lookup(f"{child}.{child}", "Constructor")
        for con in cons:
            if con.parent() is not None:
                if source_package not in con.parent().longname():
                    logger.error("Source package does not match.")
                    db.close()
                    return False
            parameters = con.parameters()
            if parameters in constructors:
                constructors[parameters].append(con)
            else:
                constructors[parameters] = [con]
    # Find common statements per signature group.
    for k in constructors:
        # meta_data: file path -> info for the listener; always includes the parent file.
        meta_data = {
            parent_file: {
                'is_father': True,
                'has_father_con': k in parent_cons,
                'class_name': parent.simplename()
            },
        }
        con = constructors[k][0]
        # ents collects names of fields assigned in the constructors; duplicates
        # (count > 1 below) indicate assignments shared by several children.
        ents = []
        for ref in con.refs("Set"):
            data = {
                'is_father': False,
                'has_father_con': k in parent_cons,
                'class_name': con.parent().simplename()
            }
            if ref.file().longname() not in meta_data.keys():
                meta_data[ref.file().longname()] = data
            if target_class in ref.ent().longname():
                ents.append(ref.ent().simplename())
        # Same scan for the remaining constructors in this signature group.
        for i in range(1, len(constructors[k])):
            con2 = constructors[k][i]
            for ref in con2.refs("Set"):
                data = {
                    'is_father': False,
                    'has_father_con': k in parent_cons,
                    'class_name': con2.parent().simplename()
                }
                if ref.file().longname() not in meta_data.keys():
                    meta_data[ref.file().longname()] = data
                if target_class in ref.ent().longname():
                    ents.append(ref.ent().simplename())
        # Keep only the field names assigned by more than one constructor.
        ents = [
            item for item, count in collections.Counter(ents).items()
            if count > 1
        ]
        # Only rewrite when more than the parent file is involved.
        if len(meta_data.keys()) > 1:
            for file_name in meta_data:
                data = meta_data[file_name]
                parse_and_walk(file_name, PullUpConstructorListener, has_write=True,
                               is_father=data['is_father'],
                               has_father_con=data['has_father_con'],
                               common_sets=ents,
                               class_name=data['class_name'],
                               params=k)
    db.close()
    return True
def main(udb_path, source_package, source_class, method_name, target_classes: list, *args, **kwargs):
    """The main API for the push-down method refactoring operation.

    Moves ``method_name`` from ``source_class`` into its (single) child class
    listed in ``target_classes``: deletes it from the source file, inserts it
    into the child file, and rewrites every call site.

    Returns:
        bool: True on success, False when a precondition fails.
    """
    target_package = source_package
    source_method = method_name
    main_file = None
    source_method_entity = None
    is_static = False
    propagation_files = []
    propagation_classes = []
    propagation_lines = []
    children_classes = []
    children_files = []
    # Initialize with understand
    db = und.open(udb_path)
    methods = db.ents("Java Method")
    for mth in methods:
        if mth.longname() == source_package + "." + source_class + "." + source_method:
            source_method_entity = mth
            # Collect the child classes (Extendby) that are push-down targets.
            for child_ref in mth.parent().refs("Extendby"):
                child_ref = child_ref.ent()
                if child_ref.simplename() in target_classes:
                    children_classes.append(child_ref.simplename())
                    children_files.append(child_ref.parent().longname())
            # print("mainfile : ", mth.parent().parent().longname())
            is_static = mth.kind().check("static")
            main_file = mth.parent().parent().longname()
            # Record every call site for later propagation.
            for ref in mth.refs("Callby"):
                propagation_files.append(ref.ent().parent().parent().longname())
                propagation_classes.append(ref.ent().parent().simplename())
                propagation_lines.append(ref.line())
    # Check pre-condition: exactly one target child class/file.
    if not len(target_classes) == 1:
        logger.error(f"len(target_classes) is not 1.")
        db.close()
        return False
    if not len(children_classes) == 1:
        logger.error(f"len(children_classes) is not 1.")
        db.close()
        return False
    if not len(children_files) == 1:
        logger.error(f"len(children_files) is not 1.")
        db.close()
        return False
    # The child must not already define an identical method.
    for mth in methods:
        if mth.simplename() == source_method:
            if mth.parent().simplename() in target_classes:
                if mth.type() == source_method_entity.type():
                    if mth.kind() == source_method_entity.kind():
                        if mth.parameters() == source_method_entity.parameters():
                            logger.error("Duplicated method")
                            db.close()
                            return False
    # The method may only use/call public entities, otherwise it cannot move.
    for ref in source_method_entity.refs("use, call"):
        ref_ent = ref.ent()
        is_public = ref_ent.kind().check("public")
        if not is_public:
            logger.error("Has internal dependencies.")
            db.close()
            return False
    # get text
    method_text = source_method_entity.contents()
    db.close()
    # Delete source method
    stream = FileStream(main_file, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parser.getTokenStream()
    parse_tree = parser.compilationUnit()
    my_listener = DeleteSourceListener(common_token_stream=token_stream, source_method=source_method)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=my_listener)
    # print(my_listener.token_stream_rewriter.getDefaultText())
    with open(main_file, mode='w', encoding='utf-8', newline='') as f:
        f.write(my_listener.token_stream_rewriter.getDefaultText())
    # Do the push down: insert the method text into each child class file.
    for child_file, child_class in zip(children_files, children_classes):
        stream = FileStream(child_file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener = PushDownMethodRefactoringListener(
            common_token_stream=token_stream, source_class=child_class,
            source_method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())
        with open(child_file, mode='w', encoding='utf8', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())
    # Propagation: rewrite each recorded call site (static vs. non-static).
    for file, _class, line in zip(propagation_files, propagation_classes, propagation_lines):
        stream = FileStream(file, encoding='utf8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        if is_static:
            my_listener = PropagationStaticListener(
                common_token_stream=token_stream, source_class=source_class,
                child_class=children_classes[0], class_name=_class,
                method_name=source_method, ref_line=line,
                target_package=target_package)
        else:
            my_listener = PropagationNonStaticListener(
                common_token_stream=token_stream, source_class=source_class,
                child_class=children_classes[0], class_name=_class,
                method_name=source_method, ref_line=line,
                target_package=target_package)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener)
        # print(my_listener.token_stream_rewriter.getDefaultText())
        with open(file, mode='w', encoding='utf8', errors='ignore', newline='') as f:
            f.write(my_listener.token_stream_rewriter.getDefaultText())
    return True
def main(udb_path: str, children_classes: list, method_name: str, *args, **kwargs):
    """Pull up a method shared by several child classes into their parent.

    Requires that ``method_name`` exists with identical content in every class
    of ``children_classes`` and has no internal dependencies; the method is
    then moved to the superclass (found via the "Extend" reference) and all
    call sites are updated.

    Returns:
        bool: True on success, False when a precondition fails.
    """
    if len(children_classes) <= 1:
        logger.error("len(children_classes) should be gte 2")
        return False
    # Initialize with understand
    destination_class = ""
    fileslist_to_be_rafeactored = set()
    fileslist_to_be_propagate = set()
    propagation_classes = set()
    db = und.open(udb_path)
    try:
        # One method entity per child; IndexError means some child lacks it.
        method_ents = [
            db.lookup(i + "." + method_name, "method")[0]
            for i in children_classes
        ]
    except IndexError:
        # print([db.lookup(i + "." + method_name, "method") for i in children_classes])
        logger.error(
            f"Method {method_name} does not exists in all children_classes.")
        db.close()
        return False
    # Get method text; all copies must be textually identical.
    method_text = method_ents[0].contents().strip()
    for method_ent in method_ents:
        if method_ent.contents().strip() != method_text:
            logger.error("Method content is different.")
            db.close()
            return False
        # The method must not use/call anything defined in the children.
        for ref in method_ent.refs("Use,Call"):
            if ref.ent().parent() is not None:
                if ref.ent().parent().simplename() in children_classes:
                    logger.error("Method has internal dependencies.")
                    db.close()
                    return False
    # Locate the files to refactor, the parent (destination) class, and call sites.
    for mth in db.ents("Java Method"):
        for child in children_classes:
            if mth.longname().endswith(child + "." + method_name):
                fileslist_to_be_rafeactored.add(
                    mth.parent().parent().longname())
                for fth in mth.parent().refs("Extend"):
                    destination_class = fth.ent().longname()
                    fileslist_to_be_rafeactored.add(
                        fth.ent().parent().longname())
                for ref in mth.refs("Java Callby"):
                    propagation_classes.add(ref.ent().parent().longname())
                    fileslist_to_be_propagate.add(
                        ref.ent().parent().parent().longname())
    db.close()
    # print("=========================================")
    # print("fileslist_to_be_propagate :", fileslist_to_be_propagate)
    # print("propagation_classes : ", propagation_classes)
    # print("fileslist_to_be_rafeactored :", fileslist_to_be_rafeactored)
    # print("father class :", destination_class)
    fileslist_to_be_rafeactored = list(fileslist_to_be_rafeactored)
    fileslist_to_be_propagate = list(fileslist_to_be_propagate)
    propagation_class = list(propagation_classes)
    # refactored start: remove the method from children, add it to the parent.
    for file in fileslist_to_be_rafeactored:
        try:
            stream = FileStream(file, encoding='utf-8', errors='ignore')
        except:
            # NOTE(review): bare except silently skips unreadable files — confirm intended.
            continue
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_refactor = PullUpMethodRefactoringListener(
            common_token_stream=token_stream,
            destination_class=destination_class,
            children_class=children_classes,
            moved_methods=method_name,
            method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_refactor)
        with open(file, mode='w', encoding='utf-8', newline='') as f:
            f.write(
                my_listener_refactor.token_stream_rewriter.getDefaultText())
    # end refactoring
    # beginning of propagate: update every file containing call sites.
    for file in fileslist_to_be_propagate:
        if not os.path.exists(file):
            continue
        stream = FileStream(file, encoding='utf-8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        # NOTE(review): a CommonTokenStream is passed to a parameter named
        # token_stream_rewriter — verify the listener's expected argument type.
        my_listener_propagate = PropagationPullUpMethodRefactoringListener(
            token_stream_rewriter=token_stream,
            old_class_name=children_classes,
            new_class_name=destination_class,
            propagated_class_name=propagation_class)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)
        with open(file, mode='w', encoding='utf8', errors='ignore', newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())
    # end of propagate
    return True
def main(source_class: str, source_package: str, target_class: str, target_package: str, field_name: str, udb_path: str, *args, **kwargs):
    """Move field main API.

    Moves ``field_name`` from ``source_class`` into ``target_class`` and
    rewrites every usage site to go through an instance named
    ``<target>bycodart``. Returns True on success, False otherwise.
    """
    # An import of the target class is only needed when the packages differ.
    import_statement = None
    if source_package != target_package:
        import_statement = f"\nimport {target_package}.{target_class};"
    instance_name = target_class.lower() + "ByCodArt"
    db = und.open(udb_path)
    # Check if field is static
    field_ent = db.lookup(f"{source_package}.{source_class}.{field_name}", "Variable")
    if len(field_ent) == 0:
        logger.error(
            f"Entity not found with query: {source_package}.{source_class}.{field_name}."
        )
        db.close()
        return False
    if source_package == target_package and source_class == target_class:
        logger.error("Can not move to self.")
        db.close()
        return False
    field_ent = field_ent[0]
    is_static = field_ent.kindname() == STATIC
    if is_static:
        logger.warning("Field is static!")
    # Group every read/write reference by file: path -> [line, ...]
    usages = {}
    for ref in field_ent.refs("Setby, Useby"):
        usages.setdefault(ref.file().longname(), []).append(ref.line())
    try:
        src_class_file = db.lookup(
            f"{source_package}.{source_class}.java")[0].longname()
        target_class_file = db.lookup(
            f"{target_package}.{target_class}.java")[0].longname()
    except IndexError:
        logger.error("This is a nested class.")
        logger.info(f"{source_package}.{source_class}.java")
        logger.info(f"{target_package}.{target_class}.java")
        db.close()
        return False
    db.close()
    # Abort when moving would create a dependency cycle.
    listener = parse_and_walk(
        file_path=target_class_file,
        listener_class=CheckCycleListener,
        class_name=source_class,
    )
    if not listener.is_valid:
        logger.error(
            f"Can not move field because there is a cycle between {source_class}, {target_class}"
        )
        return False
    # Rewrite each usage site to access the field through the new owner.
    for usage_file, usage_lines in usages.items():
        parse_and_walk(
            file_path=usage_file,
            listener_class=PropagateListener,
            has_write=True,
            field_name=field_name,
            new_name=f"{instance_name}.{field_name}",
            lines=usage_lines,
        )
    # Cut the field out of the source class, capturing its text...
    listener = parse_and_walk(
        file_path=src_class_file,
        listener_class=CutFieldListener,
        has_write=True,
        class_name=target_class,
        instance_name=instance_name,
        field_name=field_name,
        is_static=is_static,
        import_statement=import_statement,
    )
    # ...and paste it into the target class.
    parse_and_walk(
        file_path=target_class_file,
        listener_class=PasteFieldListener,
        has_write=True,
        field_text=listener.field_text,
    )
    return True
def main(udb_path=None, source_package=None, source_class=None, field_name=None, target_classes: list = None, *args, **kwargs):
    """The main API for push-down field refactoring.

    Removes ``field_name`` from ``source_class`` and inserts it into every
    child class listed in ``target_classes``, provided all the field's
    references live in those children.

    Returns:
        bool: True on success, False when a precondition fails.
    """
    # Fall back to the configured database path when none is supplied.
    if udb_path is None:
        db = und.open(sbse.config.UDB_PATH)
    else:
        db = und.open(udb_path)
    source_class_ent = None
    source_class_ents = db.lookup(f"{source_package}.{source_class}", "Class")
    if len(source_class_ents) == 0:
        logger.error(f"Cannot find source class: {source_class}")
        db.close()
        return False
    else:
        # Pick the entity whose simple name matches exactly.
        for ent in source_class_ents:
            if ent.simplename() == source_class:
                source_class_ent = ent
                break
    if source_class_ent is None:
        logger.error(f"Cannot find source class: {source_class}")
        db.close()
        return False
    fields = db.lookup(f"{source_package}.{source_class}.{field_name}", "Variable")
    if fields is None or len(fields) == 0:
        logger.error(f"Cannot find field to pushdown: {field_name}")
        db.close()
        return False
    else:
        field_ent = fields[0]
    target_class_ents_files = []
    target_class_ents_simplenames = []
    # Every child (Extendby) of the source class must be a push-down target.
    for ref in source_class_ent.refs("Extendby"):
        if ref.ent().simplename() not in target_classes:
            logger.error("Target classes are not children classes")
            db.close()
            return False
        target_class_ents_files.append(ref.ent().parent().longname())
        target_class_ents_simplenames.append(ref.ent().simplename())
    # The field may only be referenced from files named after target classes.
    for ref in field_ent.refs("Useby, Setby"):
        if ref.file().simplename().split(".")[0] in target_classes:
            continue
        else:
            logger.error("Field has dependencies.")
            db.close()
            return False
    source_class_file = source_class_ent.parent().longname()
    db.close()
    # Remove field from source class
    listener = parse_and_walk(
        file_path=source_class_file,
        listener_class=CutFieldListener,
        has_write=True,
        source_class=source_class,
        field_name=field_name,
        debug=False
    )
    # Insert field in children classes
    for i, target_class_file in enumerate(target_class_ents_files):
        parse_and_walk(
            file_path=target_class_file,
            listener_class=PasteFieldListener,
            has_write=True,
            source_class=target_class_ents_simplenames[i],
            field_content=listener.field_content,
            import_statements=listener.import_statements,
            debug=False
        )
    # db.close()
    return True