def do_refactoring(self):
    """
    Check preconditions and apply the refactoring operation to the source code.

    Returns:
        result (boolean): The result status of the applied refactoring
            (False when `self.main` raised or returned a falsy value).

    """
    logger.info(f"Running {self.name}")
    logger.info(f"Parameters {self.params}")
    try:
        # Propagate the underlying API's success flag instead of discarding it,
        # matching the behavior of the other do_refactoring implementation.
        res = self.main(**self.params)
        return res
    except Exception as e:
        logger.error(f"Error in executing refactoring:\n {e}")
        return False
def main(project_dir, source_package, source_class, field_name, target_classes: list, *args, **kwargs):
    """Push-down-field entry point backed by the symbol-table engine.

    Builds a PushDownField refactoring over every file in `project_dir` and
    runs it, reporting failure through the module logger.

    Returns:
        bool: True when the refactoring was applied, False otherwise.
    """
    refactoring = PushDownField(
        symbol_table.get_filenames_in_dir(project_dir),
        package_name=source_package,
        superclass_name=source_class,
        field_name=field_name,
        class_names=target_classes,
    )
    succeeded = refactoring.do_refactor()
    if succeeded:
        return True
    logger.error("Cannot push-down field")
    return False
def main(udb_path, source_package, source_class, source_field, *args, **kwargs):
    """Increase the visibility of a field (rewrite its modifier in place).

    Args:
        udb_path: path to the Understand database.
        source_package, source_class, source_field: fully qualify the field.

    Returns:
        bool: True when the source file was rewritten, False on any
        precondition failure.
    """
    db = und.open(udb_path)
    fields = db.lookup(f"{source_package}.{source_class}.{source_field}", "Variable")
    # Guard against None as well as empty results, consistent with the
    # sibling method-visibility API (lookup may return None).
    if fields is None or len(fields) == 0:
        logger.error("Invalid inputs.")
        db.close()
        return False
    field_ent = fields[0]
    if field_ent.simplename() != source_field:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition
    # if not field_ent.kind().check("private"):
    #     logger.error("Field is not private.")
    #     db.close()
    #     return False
    # Climb to the root entity, which is the file containing the field.
    parent = field_ent.parent()
    while parent.parent() is not None:
        parent = parent.parent()
    main_file = str(parent.longname())
    db.close()
    parse_and_walk(
        file_path=main_file,
        listener_class=IncreaseFieldVisibilityListener,
        has_write=True,
        source_class=source_class,
        source_field=source_field,
    )
    return True
def main(udb_path, source_package, source_class, source_method, *args, **kwargs):
    """Increase the visibility of a method (rewrite its modifier in place).

    Looks up `source_package.source_class.source_method` in the Understand
    database, finds the file that defines it, and rewrites that file with
    IncreaseMethodVisibilityListener.

    Returns:
        bool: True when the rewrite was applied, False on any precondition
        failure.
    """
    db = und.open(udb_path)
    candidates = db.lookup(f"{source_package}.{source_class}.{source_method}", "Method")
    if not candidates:
        logger.error("Invalid inputs.")
        db.close()
        return False
    method_entity = candidates[0]
    if method_entity.simplename() != source_method:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition
    # if not method_entity.kind().check("Private"):
    #     logger.error("Method is not private.")
    #     db.close()
    #     return False
    # Climb to the root entity: the file that contains the method.
    container = method_entity.parent()
    while container.parent() is not None:
        container = container.parent()
    target_file = container.longname()
    db.close()
    parse_and_walk(
        file_path=target_file,
        listener_class=IncreaseMethodVisibilityListener,
        has_write=True,
        source_class=source_class,
        source_method=source_method,
    )
    return True
def do_refactoring(self):
    """
    Check preconditions and apply refactoring operation to source code

    Returns:
        result (boolean): The result statues of the applied refactoring

    """
    logger.info(f"Running {self.name}")
    logger.info(f"Parameters {self.params}")
    try:
        res = self.main(**self.params)
        logger.debug(f"Executed refactoring with result {res}")
    except Exception as e:
        logger.error(f"Unexpected error in executing refactoring:\n {e}")
        return False
    return res
def main(udb_path, file_path, source_class, moved_fields, moved_methods, *args, **kwargs):
    """Extract-class refactoring entry point.

    Moves the given fields/methods out of `source_class` into a new class
    named `<source_class>Extracted`, written next to the source file.

    Returns:
        bool: result of ExtractClassAPI.do_refactor(), or False when a
        precondition fails.
    """
    new_class = f"{source_class}Extracted"
    new_file_path = os.path.join(Path(file_path).parent, f"{new_class}.java")

    # Guard clauses: source file must exist, target file must not.
    if not os.path.exists(file_path):
        logger.error(
            f'The source class "{source_class}" is nested in {file_path}')
        return False
    if os.path.exists(new_file_path):
        logger.error(f'The new class "{new_file_path}" already exist.')
        return False

    extractor = ExtractClassAPI(
        udb_path=udb_path,
        file_path=file_path,
        source_class=source_class,
        new_class=new_class,
        moved_fields=moved_fields,
        moved_methods=moved_methods,
        new_file_path=new_file_path,
    )
    extractor.get_source_class_map()
    if len(extractor.method_usage_map) == 0:
        logger.error(
            f'The method_usage_map is empty: {len(extractor.method_usage_map)}')
        return False
    return extractor.do_refactor()
def main(udb_path, source_package, source_class, source_field, *args, **kwargs):
    """Decrease the visibility of a field to private.

    Refuses the refactoring when the field is used or set by any entity
    outside `source_package.source_class`; otherwise rewrites the defining
    file with DecreaseFieldVisibilityListener.

    Returns:
        bool: True when the rewrite was applied, False otherwise.
    """
    db = und.open(udb_path)
    lookup_results = db.lookup(f"{source_package}.{source_class}.{source_field}", "Variable")
    if len(lookup_results) == 0:
        logger.error("Invalid inputs.")
        db.close()
        return False
    field_ent = lookup_results[0]
    if field_ent.simplename() != source_field:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition
    # if not field_ent.kind().check("Public"):
    #     logger.error("Field is not public.")
    #     db.close()
    #     return False
    # Any reference from outside the owning class blocks the refactoring.
    for ref in field_ent.refs("Useby,Setby"):
        ent = ref.ent()
        if f"{source_package}.{source_class}" in ent.longname():
            continue
        logger.debug(
            f"{source_package}.{source_class} not in {ent.longname()}")
        logger.error("Field cannot set to private.")
        db.close()
        return False
    # Climb to the root entity: the file that contains the field.
    container = field_ent.parent()
    while container.parent() is not None:
        container = container.parent()
    defining_file = container.longname()
    db.close()
    parse_and_walk(
        file_path=defining_file,
        listener_class=DecreaseFieldVisibilityListener,
        has_write=True,
        source_class=source_class,
        source_field=source_field,
    )
    return True
def main(udb_path, source_package, source_class, source_method, *args, **kwargs):
    """Decrease the visibility of a method to private.

    Refuses the refactoring when the method is called from outside
    `source_package.source_class`; otherwise rewrites the defining file
    with DecreaseMethodVisibilityListener.

    Returns:
        bool: True when the rewrite was applied, False otherwise.
    """
    db = und.open(udb_path)
    lookup_results = db.lookup(f"{source_package}.{source_class}.{source_method}", "Method")
    if len(lookup_results) == 0:
        logger.error("Invalid inputs.")
        db.close()
        return False
    method_ent = lookup_results[0]
    if method_ent.simplename() != source_method:
        logger.error("Invalid entity.")
        db.close()
        return False
    # Strong overlay precondition
    # if not method_ent.kind().check("Public"):
    #     logger.error("Method is not public.")
    #     db.close()
    #     return False
    # Any caller outside the owning class blocks the refactoring.
    for caller in method_ent.ents("CallBy"):
        if f"{source_package}.{source_class}" not in caller.longname():
            logger.error("Method cannot set to private.")
            db.close()
            return False
    # Climb to the root entity: the file that contains the method.
    container = method_ent.parent()
    while container.parent() is not None:
        container = container.parent()
    defining_file = container.longname()
    db.close()
    parse_and_walk(
        file_path=defining_file,
        listener_class=DecreaseMethodVisibilityListener,
        has_write=True,
        source_class=source_class,
        source_method=source_method,
    )
    return True
def exitFieldDeclaration(self, ctx: JavaParserLabeled.FieldDeclarationContext):
    """Encapsulate the target field: make it private and add accessor/mutator.

    Fires when the parser leaves a Java field declaration. If the declaration
    is the tracked field (and we are inside the selected class/package), the
    modifier is rewritten to `private`, and getter/setter methods are appended
    to the class body unless methods with the conventional names already exist.
    """
    if self.in_source_class and self.in_selected_package:
        # Only the first declarator is inspected — assumes one variable per
        # declaration; multi-declarator lines are not handled here (TODO confirm).
        if ctx.variableDeclarators().variableDeclarator(0).variableDeclaratorId().getText() == self.field_identifier:
            if not ctx.parentCtx.parentCtx.modifier(0):
                # No modifier at all: prepend 'private ' before the type.
                self.token_stream_rewriter.insertBeforeIndex(
                    index=ctx.typeType().stop.tokenIndex,
                    text='private ')
            elif ctx.parentCtx.parentCtx.modifier(0).getText() == 'public':
                # Replace the existing 'public' modifier with 'private'.
                self.token_stream_rewriter.replaceRange(
                    from_idx=ctx.parentCtx.parentCtx.modifier(0).start.tokenIndex,
                    to_idx=ctx.parentCtx.parentCtx.modifier(0).stop.tokenIndex,
                    text='private')
            else:
                # Already private/protected (or other modifier): nothing to do.
                return
            # Scan sibling class-body declarations for existing getX/setX
            # methods so we don't generate duplicates.
            for c in ctx.parentCtx.parentCtx.parentCtx.classBodyDeclaration():
                try:
                    print('method name: ' + c.memberDeclaration().methodDeclaration().IDENTIFIER().getText())
                    if c.memberDeclaration().methodDeclaration().IDENTIFIER().getText() == 'get' + str.capitalize(self.field_identifier):
                        self.getter_exist = True
                    if c.memberDeclaration().methodDeclaration().IDENTIFIER().getText() == 'set' + str.capitalize(self.field_identifier):
                        self.setter_exist = True
                except:
                    # Member is not a method declaration (field, ctor, ...);
                    # the attribute chain above raises and we skip it.
                    logger.error("not method !!!")
            logger.debug("setter find: " + str(self.setter_exist))
            logger.debug("getter find: " + str(self.getter_exist))
            # generate accessor and mutator methods
            # Accessor body
            new_code = ''
            if not self.getter_exist:
                new_code = '\n\t// new getter method\n\t'
                new_code += 'public ' + ctx.typeType().getText() + ' get' + str.capitalize(self.field_identifier)
                new_code += '() { \n\t\treturn this.' + self.field_identifier + ';' + '\n\t}\n'
            # Mutator body
            if not self.setter_exist:
                new_code += '\n\t// new setter method\n\t'
                new_code += 'public void set' + str.capitalize(self.field_identifier)
                new_code += '(' + ctx.typeType().getText() + ' ' + self.field_identifier + ') { \n\t\t'
                new_code += 'this.' + self.field_identifier + ' = ' + self.field_identifier + ';' + '\n\t}\n'
            # Insert the generated methods right after this field declaration.
            self.token_stream_rewriter.insertAfter(ctx.stop.tokenIndex, new_code)
            # NOTE(review): `hidden` is computed but never used here — possibly
            # leftover from trailing-comment handling; confirm before removing.
            hidden = self.token_stream.getHiddenTokensToRight(ctx.stop.tokenIndex)
def do_refactor(self):
    """Pull a field up from subclasses into their common superclass.

    Validates inputs against the parsed program model, inserts the field
    declaration at the top of the superclass body, removes the duplicated
    field from every matching subclass, and re-homes any field initializer
    into the subclass constructors (creating a constructor when none exists).

    Returns:
        bool: True when the rewrite was applied, False on any precondition
        failure.
    """
    program = symbol_table.get_program(self.source_filenames, print_status=False)
    # print(program.packages)
    if (
        self.package_name not in program.packages
        or self.class_name not in program.packages[self.package_name].classes
        or self.field_name not in program.packages[self.package_name].classes[self.class_name].fields
    ):
        logger.error("One or more inputs are not valid.")
        return False
    _class: symbol_table.Class = program.packages[self.package_name].classes[self.class_name]
    if _class.superclass_name is None:
        logger.error("Super class is none.")
        return False
    superclass_name = _class.superclass_name
    # The superclass must live in the same package for this refactoring.
    if not program.packages[self.package_name].classes.get(superclass_name):
        logger.error("Super class package is none!")
        return False
    superclass: symbol_table.Class = program.packages[self.package_name].classes[superclass_name]
    superclass_body_start = symbol_table.TokensInfo(superclass.parser_context.classBody())
    superclass_body_start.stop = superclass_body_start.start  # Start and stop both point to the '{'
    if self.field_name in superclass.fields:
        logger.error("Field is in superclass fields.")
        return False
    datatype = _class.fields[self.field_name].datatype
    # Collect the same-named, same-typed field from every subclass of the
    # superclass (either importing it explicitly or in the same package).
    fields_to_remove = []
    for pn in program.packages:
        p: symbol_table.Package = program.packages[pn]
        for cn in p.classes:
            c: symbol_table.Class = p.classes[cn]
            if (
                (
                    (
                        c.superclass_name == superclass_name
                        and c.file_info.has_imported_class(self.package_name, superclass_name)
                    )
                    or (self.package_name is not None and c.superclass_name == superclass_name)
                )
                and self.field_name in c.fields
                and c.fields[self.field_name].datatype == datatype
            ):
                fields_to_remove.append(c.fields[self.field_name])
    if len(fields_to_remove) == 0:
        logger.error("No fields to remove.")
        return False
    # Widest visibility wins: public if any copy is public, protected only
    # when every copy is protected/private.
    is_public = False
    is_protected = True
    for field in fields_to_remove:
        field: symbol_table.Field = field
        is_public = is_public or "public" in field.modifiers
        is_protected = is_protected and ("protected" in field.modifiers or "private" in field.modifiers)
    rewriter = symbol_table.Rewriter(program, self.filename_mapping)
    # Insert the pulled-up declaration right after the superclass's '{'.
    rewriter.insert_after(
        superclass_body_start,
        "\n\t" + ("public " if is_public else ("protected " if is_protected else "")) + datatype + " " + self.field_name + ";")
    for field in fields_to_remove:
        if len(field.neighbor_names) == 0:
            # Sole declarator on its line: drop the whole declaration.
            rewriter.replace(field.get_tokens_info(), "")
            # Have to remove the modifiers too, because of the new grammar.
            for mod_ctx in field.modifiers_parser_contexts:
                rewriter.replace(symbol_table.TokensInfo(mod_ctx), "")
        else:
            # One declarator among several: remove only this declarator plus
            # the adjacent comma.
            i = field.index_in_variable_declarators
            var_ctxs = field.all_variable_declarator_contexts
            if i == 0:
                to_remove = symbol_table.TokensInfo(var_ctxs[i])
                to_remove.stop = symbol_table.TokensInfo(var_ctxs[i + 1]).start - 1  # Include the ',' after it
                rewriter.replace(to_remove, "")
            else:
                to_remove = symbol_table.TokensInfo(var_ctxs[i])
                to_remove.start = symbol_table.TokensInfo(var_ctxs[i - 1]).stop + 1  # Include the ',' before it
                rewriter.replace(to_remove, "")
        # Add initializer to class constructor if initializer exists in field declaration
        if field.initializer is not None:
            _class: symbol_table.Class = program.packages[field.package_name].classes[field.class_name]
            # Array initializers ('{...}') need 'new <type>' prepended.
            initializer_statement = (field.name + " = " + ("new " + field.datatype + " " if field.initializer.startswith('{') else "") + field.initializer + ";")
            # Todo: Requires better handling
            if 'new' in initializer_statement and '()' in initializer_statement:
                initializer_statement = initializer_statement.replace('new', 'new ')
            has_contructor = False
            for class_body_decl in _class.parser_context.classBody().getChildren():
                if class_body_decl.getText() in ['{', '}']:
                    continue
                member_decl = class_body_decl.memberDeclaration()
                if member_decl is not None:
                    constructor = member_decl.constructorDeclaration()
                    if constructor is not None:
                        body = constructor.constructorBody  # Start token = '{'
                        body_start = symbol_table.TokensInfo(body)
                        body_start.stop = body_start.start  # Start and stop both point to the '{'
                        rewriter.insert_after(body_start, "\n\t" + initializer_statement)
                        has_contructor = True
            if not has_contructor:
                # No constructor present: synthesize one that runs the
                # initializer, reusing the class's first modifier.
                body = _class.parser_context.classBody()
                body_start = symbol_table.TokensInfo(body)
                body_start.stop = body_start.start  # Start and stop both point to the '{'
                rewriter.insert_after(
                    body_start,
                    "\n\t" + _class.modifiers[0] + " " + _class.name + "() { " + initializer_statement + " }")
    rewriter.apply()
    # Todo: check for multilevel inheritance recursively.
    # if _class.superclass_name is not None:
    #     PullUpFieldRefactoring(self.source_filenames, self.package_name, _class.superclass_name, "id").do_refactor()
    return True
def main(source_class: str, source_package: str, target_class: str, target_package: str, method_name: str, udb_path: str, *args, **kwargs):
    """Move a method from `source_class` to `target_class`.

    Pipeline: validate the method entity, collect call sites, check for a
    dependency cycle, rewrite all callers to go through a generated instance
    named `<target_class lowercased>ByCodArt`, then cut the method from the
    source file and paste it (plus needed imports/constructor) into the
    target file.

    Returns:
        bool: True on success, False on any precondition failure.
    """
    # Cross-package moves need an import of the target class at call sites.
    import_statement = None
    if source_package != target_package:
        import_statement = f"\nimport {target_package}.{target_class};"
    instance_name = target_class.lower() + "ByCodArt"
    db = und.open(udb_path)
    method_map, class_ent = get_source_class_map(db, source_class)
    if class_ent is None:
        logger.error("Class entity is None")
        return False
    # Strong overlay precondition
    # if class_ent.refs("Extend ~Implicit, ExtendBy, Implement"):
    #     logger.error("Class is in inheritance or implements an interface.")
    #     db.close()
    #     return False
    # Check if method is static
    method_ent = db.lookup(f"{source_package}.{source_class}.{method_name}", "Method")
    if len(method_ent) >= 1:
        method_ent = method_ent[0]
    else:
        logger.error("Entity not found.")
        db.close()
        return False
    if method_ent.simplename() != method_name:
        logger.error("Can not move method duo to duplicated entities.")
        logger.info(f"{method_ent}, {method_ent.kindname()}")
        db.close()
        return False
    if source_package == target_package and source_class == target_class:
        logger.error("Can not move to self.")
        db.close()
        return False
    is_static = STATIC in method_ent.kindname()
    # Find usages
    # Map each referencing file to the list of call-site line numbers.
    usages = {}
    for ref in method_ent.refs("Callby"):
        file = ref.file().longname()
        if file in usages:
            usages[file].append(ref.line())
        else:
            usages[file] = [ref.line(), ]
    try:
        src_class_file = db.lookup(f"{source_package}.{source_class}.java", "File")[0].longname()
        target_class_file = db.lookup(f"{target_package}.{target_class}.java", "File")[0].longname()
    except IndexError:
        # No file entity named after the class => class is nested in
        # another file; this refactoring does not support that.
        logger.error("This is a nested method.")
        logger.info(f"{source_package}.{source_class}.java")
        logger.info(f"{target_package}.{target_class}.java")
        db.close()
        return False
    db.close()
    # Check if there is an cycle
    listener = parse_and_walk(file_path=target_class_file, listener_class=CheckCycleListener, class_name=source_class)
    if not listener.is_valid:
        logger.error(
            f"Can not move method because there is a cycle between {source_class}, {target_class}"
        )
        # db.close()
        return False
    # Propagate Changes: rewrite each call site to call through the new
    # instance name.
    for file in usages.keys():
        public_class_name = os.path.basename(file).split(".")[0]
        is_in_target_class = public_class_name == target_class
        parse_and_walk(
            file_path=file,
            listener_class=PropagateListener,
            has_write=True,
            method_name=method_name,
            new_name=f"{instance_name}.{method_name}",
            lines=usages[file],
            is_in_target_class=is_in_target_class,
            method_map=method_map,
        )
    # exit(-1)
    # Do the cut and paste!
    # Cut
    listener = parse_and_walk(
        file_path=src_class_file,
        listener_class=CutMethodListener,
        has_write=True,
        class_name=target_class,
        instance_name=instance_name,
        method_name=method_name,
        is_static=is_static,
        import_statement=import_statement,
    )
    method_text = listener.method_text
    # Paste
    listener = parse_and_walk(
        file_path=target_class_file,
        listener_class=PasteMethodListener,
        has_write=True,
        method_text=method_text,
        source_class=source_class,
        method_map=method_map,
        imports=listener.imports,
    )
    # Post-Paste: Reference Injection
    parse_and_walk(
        file_path=target_class_file,
        listener_class=ReferenceInjectorAndConstructorListener,
        has_write=True,
        method_text=method_text,
        source_class=source_class,
        method_map=method_map,
        imports=None,
        has_empty_cons=listener.has_empty_cons,
    )
    # db.close()
    return True
def main(udb_path, source_package, target_class, class_names: list, *args, **kwargs):
    """Pull duplicated constructor statements up into the parent class.

    Groups the children's constructors by parameter signature, finds the
    field-set statements that appear in more than one sibling constructor,
    and rewrites every involved file with PullUpConstructorListener.

    Returns:
        bool: True on completion, False on any precondition failure.
    """
    if len(class_names) < 2:
        logger.error("class_names is empty.")
        return False
    db = und.open(udb_path)
    parent_cons = []
    # Check children
    parent = db.lookup(f"{target_class}", "Public Class")
    if len(parent) != 1:
        logger.error("Count of target class is not 1.")
        db.close()
        return False
    parent = parent[0]
    parent_file = db.lookup(f"{target_class}.java", "File")[0].longname()
    # Record the parent's existing constructor signatures.
    for i in parent.ents("Define", "Constructor"):
        parent_cons.append(i.parameters())
    # Find constructor entities group by signature
    constructors = {}
    for child in class_names:
        cons = db.lookup(f"{child}.{child}", "Constructor")
        for con in cons:
            if con.parent() is not None:
                if source_package not in con.parent().longname():
                    logger.error("Source package does not match.")
                    db.close()
                    return False
            parameters = con.parameters()
            if parameters in constructors:
                constructors[parameters].append(con)
            else:
                constructors[parameters] = [con]
    # Find common statements
    for k in constructors:
        # meta_data maps each file to the flags the listener needs.
        meta_data = {
            parent_file: {
                'is_father': True,
                'has_father_con': k in parent_cons,
                'class_name': parent.simplename()
            },
        }
        con = constructors[k][0]
        ents = []
        # Collect every entity set by the first constructor of this signature.
        for ref in con.refs("Set"):
            data = {
                'is_father': False,
                'has_father_con': k in parent_cons,
                'class_name': con.parent().simplename()
            }
            if ref.file().longname() not in meta_data.keys():
                meta_data[ref.file().longname()] = data
            if target_class in ref.ent().longname():
                ents.append(ref.ent().simplename())
        # Same collection for the remaining sibling constructors.
        for i in range(1, len(constructors[k])):
            con2 = constructors[k][i]
            for ref in con2.refs("Set"):
                data = {
                    'is_father': False,
                    'has_father_con': k in parent_cons,
                    'class_name': con2.parent().simplename()
                }
                if ref.file().longname() not in meta_data.keys():
                    meta_data[ref.file().longname()] = data
                if target_class in ref.ent().longname():
                    ents.append(ref.ent().simplename())
        # Keep only entities set by more than one constructor — these are
        # the common statements worth pulling up.
        ents = [
            item for item, count in collections.Counter(ents).items()
            if count > 1
        ]
        if len(meta_data.keys()) > 1:
            for file_name in meta_data:
                data = meta_data[file_name]
                parse_and_walk(file_name,
                               PullUpConstructorListener,
                               has_write=True,
                               is_father=data['is_father'],
                               has_father_con=data['has_father_con'],
                               common_sets=ents,
                               class_name=data['class_name'],
                               params=k)
    db.close()
    return True
def main(udb_path=None, source_package=None, source_class=None, field_name=None, target_classes: list = None, *args, **kwargs):
    """
    The main API for push-down field refactoring.

    Removes `field_name` from `source_class` and inserts it into every
    direct child class listed in `target_classes`, provided the field has
    no references outside those children.

    Returns:
        bool: True on success, False on any precondition failure.
    """
    # Fall back to the project-wide database when no path is supplied.
    if udb_path is None:
        db = und.open(codart.config.UDB_PATH)
    else:
        db = und.open(udb_path)
    source_class_ent = None
    source_class_ents = db.lookup(f"{source_package}.{source_class}", "Class")
    if len(source_class_ents) == 0:
        logger.error(f"Cannot find source class: {source_class}")
        db.close()
        return False
    else:
        # Disambiguate by simple name in case the lookup matched several.
        for ent in source_class_ents:
            if ent.simplename() == source_class:
                source_class_ent = ent
                break
    if source_class_ent is None:
        logger.error(f"Cannot find source class: {source_class}")
        db.close()
        return False
    fields = db.lookup(f"{source_package}.{source_class}.{field_name}", "Variable")
    if fields is None or len(fields) == 0:
        logger.error(f"Cannot find field to pushdown: {field_name}")
        db.close()
        return False
    else:
        field_ent = fields[0]
    target_class_ents_files = []
    target_class_ents_simplenames = []
    # Every direct child must be listed in target_classes.
    for ref in source_class_ent.refs("Extendby"):
        if ref.ent().simplename() not in target_classes:
            logger.error("Target classes are not children classes")
            db.close()
            return False
        target_class_ents_files.append(ref.ent().parent().longname())
        target_class_ents_simplenames.append(ref.ent().simplename())
    # The field may only be referenced from the target children; a reference
    # from anywhere else blocks the push-down.
    for ref in field_ent.refs("Useby, Setby"):
        if ref.file().simplename().split(".")[0] in target_classes:
            continue
        else:
            logger.error("Field has dependencies.")
            db.close()
            return False
    source_class_file = source_class_ent.parent().longname()
    db.close()
    # Remove field from source class
    listener = parse_and_walk(file_path=source_class_file,
                              listener_class=CutFieldListener,
                              has_write=True,
                              source_class=source_class,
                              field_name=field_name,
                              debug=False)
    # Insert field in children classes
    for i, target_class_file in enumerate(target_class_ents_files):
        parse_and_walk(file_path=target_class_file,
                       listener_class=PasteFieldListener,
                       has_write=True,
                       source_class=target_class_ents_simplenames[i],
                       field_content=listener.field_content,
                       import_statements=listener.import_statements,
                       debug=False)
    # db.close()
    return True
def main():
    """Optimization module main driver.

    Builds the refactoring-sequence search: an initializer (smell-based or
    random), three problem formulations (single / multi / many objective),
    and three matching algorithms (GA / NSGA-II / NSGA-III). `config.PROBLEM`
    selects which problem+algorithm pair runs. Results, Pareto fronts, and
    high-tradeoff points are logged and dumped to JSON/CSV files under
    `config.PROJECT_LOG_DIR`.
    """
    # Define initialization objects
    initializer_class = SmellInitialization if config.WARM_START else RandomInitialization
    initializer_object = initializer_class(
        udb_path=config.UDB_PATH,
        population_size=config.POPULATION_SIZE,
        lower_band=config.LOWER_BAND,
        upper_band=config.UPPER_BAND
    )
    # -------------------------------------------
    # Define optimization problems
    problems = list()
    # 0: Genetic (Single), 1: NSGA-II (Multi), 2: NSGA-III (Many) objectives problems
    problems.append(
        ProblemSingleObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
        )
    )
    problems.append(
        ProblemMultiObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
        )
    )
    problems.append(
        ProblemManyObjective(
            n_objectives=config.NUMBER_OBJECTIVES,
            n_refactorings_lowerbound=config.LOWER_BAND,
            n_refactorings_upperbound=config.UPPER_BAND,
            evaluate_in_parallel=False,
            verbose_design_metrics=True,
        )
    )
    # Define search algorithms
    algorithms = list()
    # 1: GA
    alg1 = GA(
        pop_size=config.POPULATION_SIZE,
        sampling=PopulationInitialization(initializer_object),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg1)
    # 2: NSGA-II
    alg2 = NSGA2(
        pop_size=config.POPULATION_SIZE,
        sampling=PopulationInitialization(initializer_object),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg2)
    # 3: NSGA-III
    # pop_size must be equal or larger than the number of reference directions
    number_of_references_points = config.POPULATION_SIZE - int(config.POPULATION_SIZE * 0.20)
    ref_dirs = get_reference_directions(
        'energy',  # algorithm
        config.NUMBER_OBJECTIVES,  # number of objectives
        number_of_references_points,  # number of reference directions
        seed=1
    )
    alg3 = NSGA3(
        ref_dirs=ref_dirs,
        pop_size=config.POPULATION_SIZE,  # 200
        sampling=PopulationInitialization(initializer_object),
        selection=TournamentSelection(func_comp=binary_tournament),
        crossover=AdaptiveSinglePointCrossover(prob=config.CROSSOVER_PROBABILITY, ),
        # crossover=get_crossover("real_k_point", n_points=2),
        mutation=BitStringMutation(prob=config.MUTATION_PROBABILITY, initializer=initializer_object),
        eliminate_duplicates=ElementwiseDuplicateElimination(cmp_func=is_equal_2_refactorings_list),
        n_gen=config.NGEN,
    )
    algorithms.append(alg3)
    # Termination of algorithms
    my_termination = MultiObjectiveDefaultTermination(
        x_tol=None,
        cv_tol=None,
        f_tol=0.0015,
        nth_gen=5,
        n_last=5,
        n_max_gen=config.MAX_ITERATIONS,  # about 1000 - 1400
        n_max_evals=1e6
    )
    # Do optimization for various problems with various algorithms
    res = minimize(
        problem=problems[config.PROBLEM],
        algorithm=algorithms[config.PROBLEM],
        termination=my_termination,
        seed=1,
        verbose=False,
        copy_algorithm=True,
        copy_termination=True,
        save_history=False,
        callback=LogCallback(),
    )
    # np.save('checkpoint', res.algorithm)
    # Log results
    logger.info(f"***** Algorithm was finished in {res.algorithm.n_gen + config.NGEN} generations *****")
    logger.info(" ")
    logger.info("============ time information ============")
    logger.info(f"Start time: {datetime.fromtimestamp(res.start_time).strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"End time: {datetime.fromtimestamp(res.end_time).strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"Execution time in seconds: {res.exec_time}")
    logger.info(f"Execution time in minutes: {res.exec_time / 60}")
    logger.info(f"Execution time in hours: {res.exec_time / (60 * 60)}")
    # logger.info(f"Number of generations: {res.algorithm.n_gen}")
    # logger.info(f"Number of generations", res.algorithm.termination)
    # Log optimum solutions
    logger.info("============ All opt solutions ============")
    for i, ind in enumerate(res.opt):
        logger.info(f'Opt refactoring sequence {i}:')
        logger.info(ind.X)
        logger.info(f'Opt refactoring sequence corresponding objectives vector {i}:')
        logger.info(ind.F)
        logger.info("-" * 75)
    # Log best refactorings
    logger.info("============ Best refactoring sequences (a set of non-dominated solutions) ============")
    for i, ind in enumerate(res.X):
        logger.info(f'Best refactoring sequence {i}:')
        logger.info(ind)
        logger.info("-" * 75)
    logger.info("============ Best objective values (a set of non-dominated solutions) ============")
    for i, ind_objective in enumerate(res.F):
        logger.info(f'Best refactoring sequence corresponding objectives vector {i}:')
        logger.info(ind_objective)
        logger.info("-" * 75)
    # Save best refactorings
    # Each chromosome is flattened to (name, params) tuples so it can be
    # serialized as JSON; the single-objective chromosome shape differs
    # (flat list vs. list-wrapped).
    population_trimmed = []
    objective_values_content = ''
    for chromosome in res.X:
        chromosome_new = []
        if config.PROBLEM == 0:  # i.e., single objective problem
            for gene_ in chromosome:
                chromosome_new.append((gene_.name, gene_.params))
        else:
            for gene_ in chromosome[0]:
                chromosome_new.append((gene_.name, gene_.params))
        population_trimmed.append(chromosome_new)
    for objective_vector in res.F:
        objective_values_content += f'{res.algorithm.n_gen + config.NGEN},'
        if config.PROBLEM == 0:
            objective_values_content += f'{objective_vector},'
        else:
            for objective_ in objective_vector:
                objective_values_content += f'{objective_},'
        objective_values_content += '\n'
    best_refactoring_sequences_path = os.path.join(
        config.PROJECT_LOG_DIR,
        f'best_refactoring_sequences_after_{res.algorithm.n_gen + config.NGEN}gens.json'
    )
    with open(best_refactoring_sequences_path, mode='w', encoding='utf-8') as fp:
        json.dump(population_trimmed, fp, indent=4)
    best_refactoring_sequences_objectives_path = os.path.join(
        config.PROJECT_LOG_DIR,
        f'best_refactoring_sequences_objectives_after_{res.algorithm.n_gen + config.NGEN}gens.csv'
    )
    with open(best_refactoring_sequences_objectives_path, mode='w', encoding='utf-8') as fp:
        fp.write(objective_values_content)
    # High-tradeoff analysis only applies to multi-objective fronts; the
    # broad except below treats any failure (e.g. a degenerate front) as
    # "no multi-optimal solutions". NOTE(review): bare except also hides
    # unrelated bugs — consider narrowing.
    try:
        pf = res.F
        # dm = HighTradeoffPoints()
        dm = get_decision_making("high-tradeoff")
        I = dm.do(pf)
        logger.info("============ High-tradeoff points refactoring sequences ============")
        for i, ind in enumerate(res.X[I]):
            logger.info(f'High tradeoff points refactoring sequence {i}:')
            logger.info(ind)
            logger.info("-" * 75)
        logger.info("============ High-tradeoff points objective values ============")
        for i, ind_objective in enumerate(pf[I]):
            logger.info(f'High-tradeoff points refactoring sequence corresponding objectives vector {i}:')
            logger.info(ind_objective)
            logger.info("-" * 75)
        logger.info("High-tradeoff points mean:")
        logger.info(np.mean(pf[I], axis=0))
        logger.info("High-tradeoff points median:")
        logger.info(np.median(pf[I], axis=0))
        # Save high-tradeoff refactorings
        population_trimmed = []
        objective_values_content = ''
        for chromosome in res.X[I]:
            chromosome_new = []
            if config.PROBLEM == 0:  # i.e., single objective problem
                for gene_ in chromosome:
                    chromosome_new.append((gene_.name, gene_.params))
            else:
                for gene_ in chromosome[0]:
                    chromosome_new.append((gene_.name, gene_.params))
            population_trimmed.append(chromosome_new)
        for objective_vector in pf[I]:
            objective_values_content += f'{res.algorithm.n_gen + config.NGEN},'
            if config.PROBLEM == 0:
                objective_values_content += f'{objective_vector},'
            else:
                for objective_ in objective_vector:
                    objective_values_content += f'{objective_},'
            objective_values_content += '\n'
        high_tradeoff_path = os.path.join(
            config.PROJECT_LOG_DIR,
            f'high_tradeoff_points_refactoring_after_{res.algorithm.n_gen + config.NGEN}gens.json'
        )
        with open(high_tradeoff_path, mode='w', encoding='utf-8') as fp:
            json.dump(population_trimmed, fp, indent=4)
        high_tradeoff_path_objectives_path = os.path.join(
            config.PROJECT_LOG_DIR,
            f'high_tradeoff_points_after_{res.algorithm.n_gen + config.NGEN}gens.csv'
        )
        with open(high_tradeoff_path_objectives_path, mode='w', encoding='utf-8') as fp:
            fp.write(objective_values_content)
    except:
        logger.error("No multi-optimal solutions (error in computing high tradeoff points)!")
def main(udb_path: str, children_classes: list, method_name: str, *args, **kwargs):
    """Pull a method up from sibling child classes into their parent class.

    Preconditions: the method must exist with byte-identical content in every
    child class and must have no dependencies on the children themselves.
    The method is then inserted into the parent and removed from the
    children, and all callers are rewritten to use the parent class.

    Returns:
        bool: True on success, False on any precondition failure.
    """
    if len(children_classes) <= 1:
        logger.error("len(children_classes) should be gte 2")
        return False
    # Initialize with understand
    destination_class = ""
    fileslist_to_be_rafeactored = set()
    fileslist_to_be_propagate = set()
    propagation_classes = set()
    db = und.open(udb_path)
    try:
        # One entity per child; IndexError means some child lacks the method.
        method_ents = [
            db.lookup(i + "." + method_name, "method")[0]
            for i in children_classes
        ]
    except IndexError:
        # print([db.lookup(i + "." + method_name, "method") for i in children_classes])
        logger.error(
            f"Method {method_name} does not exists in all children_classes.")
        db.close()
        return False
    # Get method text
    method_text = method_ents[0].contents().strip()
    for method_ent in method_ents:
        # All copies must be textually identical to be safely merged.
        if method_ent.contents().strip() != method_text:
            logger.error("Method content is different.")
            db.close()
            return False
        # The method must not reference anything defined in the children.
        for ref in method_ent.refs("Use,Call"):
            if ref.ent().parent() is not None:
                if ref.ent().parent().simplename() in children_classes:
                    logger.error("Method has internal dependencies.")
                    db.close()
                    return False
    # Collect the files to rewrite, the parent (destination) class, and all
    # call sites that need propagation.
    for mth in db.ents("Java Method"):
        for child in children_classes:
            if mth.longname().endswith(child + "." + method_name):
                fileslist_to_be_rafeactored.add(
                    mth.parent().parent().longname())
                for fth in mth.parent().refs("Extend"):
                    destination_class = fth.ent().longname()
                    fileslist_to_be_rafeactored.add(
                        fth.ent().parent().longname())
                for ref in mth.refs("Java Callby"):
                    propagation_classes.add(ref.ent().parent().longname())
                    fileslist_to_be_propagate.add(
                        ref.ent().parent().parent().longname())
    db.close()
    # print("=========================================")
    # print("fileslist_to_be_propagate :", fileslist_to_be_propagate)
    # print("propagation_classes : ", propagation_classes)
    # print("fileslist_to_be_rafeactored :", fileslist_to_be_rafeactored)
    # print("father class :", destination_class)
    fileslist_to_be_rafeactored = list(fileslist_to_be_rafeactored)
    fileslist_to_be_propagate = list(fileslist_to_be_propagate)
    propagation_class = list(propagation_classes)
    # refactored start
    for file in fileslist_to_be_rafeactored:
        try:
            stream = FileStream(file, encoding='utf-8', errors='ignore')
        except:
            # Unreadable/missing file: skip it rather than abort the whole run.
            continue
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        my_listener_refactor = PullUpMethodRefactoringListener(
            common_token_stream=token_stream,
            destination_class=destination_class,
            children_class=children_classes,
            moved_methods=method_name,
            method_text=method_text)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_refactor)
        with open(file, mode='w', encoding='utf-8', newline='') as f:
            f.write(
                my_listener_refactor.token_stream_rewriter.getDefaultText())
    # end refactoring
    # beginning of propagate
    for file in fileslist_to_be_propagate:
        if not os.path.exists(file):
            continue
        stream = FileStream(file, encoding='utf-8', errors='ignore')
        lexer = JavaLexer(stream)
        token_stream = CommonTokenStream(lexer)
        parser = JavaParserLabeled(token_stream)
        parser.getTokenStream()
        parse_tree = parser.compilationUnit()
        # NOTE(review): the raw token stream is passed as `token_stream_rewriter`
        # here (the listener presumably wraps it) — confirm against the
        # listener's constructor.
        my_listener_propagate = PropagationPullUpMethodRefactoringListener(
            token_stream_rewriter=token_stream,
            old_class_name=children_classes,
            new_class_name=destination_class,
            propagated_class_name=propagation_class)
        walker = ParseTreeWalker()
        walker.walk(t=parse_tree, listener=my_listener_propagate)
        with open(file, mode='w', encoding='utf8', errors='ignore', newline='') as f:
            f.write(
                my_listener_propagate.token_stream_rewriter.getDefaultText())
    # end of propagate
    return True
def _walk_and_rewrite(file_path, make_listener):
    """Parse a Java file, run `make_listener(token_stream)` over its parse tree,
    and write the rewritten source back to the same file.

    `make_listener` receives the CommonTokenStream and must return a listener
    exposing `token_stream_rewriter` (the shared pattern of every refactoring
    listener used below).
    """
    stream = FileStream(file_path, encoding='utf8', errors='ignore')
    lexer = JavaLexer(stream)
    token_stream = CommonTokenStream(lexer)
    parser = JavaParserLabeled(token_stream)
    parse_tree = parser.compilationUnit()
    listener = make_listener(token_stream)
    walker = ParseTreeWalker()
    walker.walk(t=parse_tree, listener=listener)
    with open(file_path, mode='w', encoding='utf8', errors='ignore', newline='') as f:
        f.write(listener.token_stream_rewriter.getDefaultText())


def main(udb_path, source_package, source_class, method_name, target_classes: list, *args, **kwargs):
    """
    The main API for the push-down method refactoring operation.

    Moves `method_name` from `source_class` into the single child class named
    in `target_classes`, then rewrites every call site so existing callers
    keep working (static and non-static call sites are handled separately).

    Args:
        udb_path: Path to the Understand database of the project.
        source_package: Package containing the source class.
        source_class: Class that currently declares the method.
        method_name: Simple name of the method to push down.
        target_classes: Must contain exactly one child-class simple name.

    Returns:
        True on success, False when any precondition fails.
    """
    target_package = source_package
    source_method = method_name
    main_file = None
    source_method_entity = None
    is_static = False
    propagation_files = []
    propagation_classes = []
    propagation_lines = []
    children_classes = []
    children_files = []

    # Initialize with understand
    db = und.open(udb_path)
    methods = db.ents("Java Method")
    for mth in methods:
        if mth.longname() == f"{source_package}.{source_class}.{source_method}":
            source_method_entity = mth
            # Collect the children of the source class that we may push into.
            for child_ref in mth.parent().refs("Extendby"):
                child_ent = child_ref.ent()
                if child_ent.simplename() in target_classes:
                    children_classes.append(child_ent.simplename())
                    children_files.append(child_ent.parent().longname())
            is_static = mth.kind().check("static")
            main_file = mth.parent().parent().longname()
            # Every caller of the method must later be re-pointed at the child.
            for ref in mth.refs("Callby"):
                propagation_files.append(ref.ent().parent().parent().longname())
                propagation_classes.append(ref.ent().parent().simplename())
                propagation_lines.append(ref.line())

    # Check pre-conditions.
    # Guard added: if the lookup loop never matched, the original code crashed
    # with AttributeError on `source_method_entity.type()` further down.
    if source_method_entity is None or main_file is None:
        logger.error("Source method not found.")
        db.close()
        return False
    if len(target_classes) != 1:
        logger.error("len(target_classes) is not 1.")
        db.close()
        return False
    if len(children_classes) != 1:
        logger.error("len(children_classes) is not 1.")
        db.close()
        return False
    if len(children_files) != 1:
        logger.error("len(children_files) is not 1.")
        db.close()
        return False
    for mth in methods:
        # Reject the push-down when the target class already declares an
        # identical method (same name, type, kind and parameter list).
        if (mth.simplename() == source_method
                and mth.parent().simplename() in target_classes
                and mth.type() == source_method_entity.type()
                and mth.kind() == source_method_entity.kind()
                and mth.parameters() == source_method_entity.parameters()):
            logger.error("Duplicated method")
            db.close()
            return False
    # The method must not rely on non-public members of the source class,
    # since those are invisible from the child after the move.
    for ref in source_method_entity.refs("use, call"):
        if not ref.ent().kind().check("public"):
            logger.error("Has internal dependencies.")
            db.close()
            return False

    # Capture the method text before closing the database.
    method_text = source_method_entity.contents()
    db.close()

    # Delete the method from the source (parent) class.
    _walk_and_rewrite(
        main_file,
        lambda ts: DeleteSourceListener(common_token_stream=ts,
                                        source_method=source_method),
    )

    # Insert the method into each (exactly one) child class.
    for child_file, child_class in zip(children_files, children_classes):
        _walk_and_rewrite(
            child_file,
            lambda ts, cc=child_class: PushDownMethodRefactoringListener(
                common_token_stream=ts,
                source_class=cc,
                source_method_text=method_text),
        )

    # Propagation: rewrite every call site to reference the child class.
    for file, _class, line in zip(propagation_files, propagation_classes,
                                  propagation_lines):
        listener_cls = (PropagationStaticListener if is_static
                        else PropagationNonStaticListener)
        _walk_and_rewrite(
            file,
            lambda ts, cls=listener_cls, cn=_class, ln=line: cls(
                common_token_stream=ts,
                source_class=source_class,
                child_class=children_classes[0],
                class_name=cn,
                method_name=source_method,
                ref_line=ln,
                target_package=target_package),
        )
    return True
def main(source_class: str, source_package: str, target_class: str, target_package: str, field_name: str, udb_path: str, *args, **kwargs):
    """
    Move-field main API.

    Relocates `field_name` from the source class into the target class and
    rewrites every read/write site so it goes through a generated instance
    of the target class.

    Returns True on success, False when any precondition fails.
    """
    # An import statement is only needed when the classes live in
    # different packages.
    import_statement = None
    if source_package != target_package:
        import_statement = f"\nimport {target_package}.{target_class};"
    instance_name = target_class.lower() + "ByCodArt"

    db = und.open(udb_path)

    # Locate the field entity; this also tells us whether it is static.
    field_ent = db.lookup(f"{source_package}.{source_class}.{field_name}", "Variable")
    if len(field_ent) == 0:
        logger.error(f"Entity not found with query: {source_package}.{source_class}.{field_name}.")
        db.close()
        return False
    if source_package == target_package and source_class == target_class:
        logger.error("Can not move to self.")
        db.close()
        return False
    field_ent = field_ent[0]
    is_static = field_ent.kindname() == STATIC
    if is_static:
        logger.warning("Field is static!")

    # Collect every file and line where the field is read or written.
    usages = {}
    for ref in field_ent.refs("Setby, Useby"):
        usages.setdefault(ref.file().longname(), []).append(ref.line())

    # Resolve both class files; nested classes have no file entity of
    # their own, which surfaces here as an IndexError.
    try:
        src_class_file = db.lookup(f"{source_package}.{source_class}.java")[0].longname()
        target_class_file = db.lookup(f"{target_package}.{target_class}.java")[0].longname()
    except IndexError:
        logger.error("This is a nested class.")
        logger.info(f"{source_package}.{source_class}.java")
        logger.info(f"{target_package}.{target_class}.java")
        db.close()
        return False
    db.close()

    # Refuse the move when it would introduce a dependency cycle between
    # the two classes.
    listener = parse_and_walk(
        file_path=target_class_file,
        listener_class=CheckCycleListener,
        class_name=source_class,
    )
    if not listener.is_valid:
        logger.error(f"Can not move field because there is a cycle between {source_class}, {target_class}")
        return False

    # Propagate changes: point each usage at the new owner instance.
    for file, lines in usages.items():
        parse_and_walk(
            file_path=file,
            listener_class=PropagateListener,
            has_write=True,
            field_name=field_name,
            new_name=f"{instance_name}.{field_name}",
            lines=lines,
        )

    # Cut the field out of the source class...
    listener = parse_and_walk(
        file_path=src_class_file,
        listener_class=CutFieldListener,
        has_write=True,
        class_name=target_class,
        instance_name=instance_name,
        field_name=field_name,
        is_static=is_static,
        import_statement=import_statement,
    )

    # ...and paste it into the target class.
    parse_and_walk(
        file_path=target_class_file,
        listener_class=PasteFieldListener,
        has_write=True,
        field_text=listener.field_text,
    )
    return True