def __init__(self, common_token_stream: CommonTokenStream = None,
             package_name: str = None,
             source_class_name: str = None,
             field_identifier: str = None):
    """Listener state for encapsulating a field.

    :param common_token_stream: contains the program tokens
    :param package_name: package enclosing the class (None is treated as '')
    :param source_class_name: contains the enclosing class of the field
    :param field_identifier: the field name to be encapsulated
    :raises TypeError: if common_token_stream is None
    """
    self.token_stream = common_token_stream
    # Normalize a missing package name to the empty string.
    self.package_name = package_name if package_name is not None else ''
    self.source_class_name = source_class_name
    self.field_identifier = field_identifier
    # Flags toggled while walking the parse tree.
    self.getter_exist = False
    self.setter_exist = False
    self.in_source_class = False
    # With no explicit package we start out inside the selected package.
    self.in_selected_package = self.package_name == ''
    # Move all the tokens in the source code in a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __propagate(self, program: Program, rewriter: Rewriter):
    """Propagate the field change to every class that records usages of it.

    :param program: parsed program model whose packages/classes are scanned
    :param rewriter: The rewriter object which is going to rewrite the files
    :return: list of mapped filenames of the classes that were modified
    """
    modified_files = []
    for pkg in program.packages.values():
        for klass in pkg.classes.values():
            # Classes without a recorded `usages` attribute have nothing to rewrite.
            if not hasattr(klass, "usages"):
                continue
            modified_files.append(self.filename_mapper(klass.filename))
            for usage in klass.usages:
                tokens_info = TokensInfo(usage)
                token_stream = usage.parser.getTokenStream()
                # Lazily register one TokenStreamRewriter per token stream.
                if token_stream not in rewriter.token_streams.keys():
                    # NOTE(review): the filename appears twice in this tuple;
                    # confirm the tuple shape expected by Rewriter.
                    rewriter.token_streams[token_stream] = (
                        usage["meta_data"].filename,
                        TokenStreamRewriter(token_stream),
                        usage["meta_data"].filename)
                # Re-qualify every usage with the target class name.
                rewriter.replace(
                    tokens_info,
                    f'{self.target_class_name}.{self.field_name}')
            # The target class itself does not need an import of itself.
            if klass.name == self.target_class_name:
                continue
            if self.should_add_import(klass):
                self.__add_import(klass, rewriter)
    return modified_files
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class=None, field_name: str = None):
    """The Main listener which parses the file based on the provided
    information, using the ANTLR parser generator and tokenization methods.
    Detects the desired field and removes the "final" keyword from its
    properties.

    Args:
        common_token_stream (CommonTokenStream): A stream of tokens generated
            by parsing the main file using the ANTLR parser generator
        source_class (str): Name of the class in which the refactoring has to be done
        field_name (str): Name of the field whose final status has to be changed

    Returns:
        No returns
    """
    # Empty strings stand in for missing names so later comparisons never see None.
    self.field_name = field_name if field_name is not None else ""
    self.source_class = source_class if source_class is not None else ""
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state flags.
    self.is_source_class = False
    self.is_final = False
def __init__(self, common_token_stream: CommonTokenStream = None,
             class_name: str = "Main", new_method_name: str = "newMethod"):
    """
    :param common_token_stream: token stream of the file under refactoring
    :param class_name: the name of the class that duplications should be considered
    :param new_method_name: the name of the new method that contains that statements
    :raises ValueError: if common_token_stream is None
    """
    # Fix: validate first. The original read common_token_stream.tokens before
    # this check, so passing None raised AttributeError instead of the
    # intended ValueError.
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.common_token_stream = common_token_stream
    # make a copy of the tokens
    self.tokens = common_token_stream.tokens
    self.refactor_class_name = class_name
    self.new_method_name = new_method_name
    self.token_stream_re_writer = TokenStreamRewriter(common_token_stream)
    # dictionary that maps the methods to its statements
    self.method_statements = {}
    # tree helper variables
    self.is_in_target_class = False
    self.is_in_a_method = False
    self.current_method_name = ""
    # refactoring
    self.duplicates = None  # if it is None, then we don't have any duplications
def __init__(self, common_token_stream: CommonTokenStream = None,
             package_name: str = None, class_identifier: str = None,
             class_new_name: str = None):
    """Rename-class listener state.

    :param common_token_stream: token stream of the parsed source file
    :param package_name: package that encloses the class to rename
    :param class_identifier: current name of the class
    :param class_new_name: new name for the class
    :raises TypeError: if common_token_stream is None
    """
    self.token_stream = common_token_stream
    self.class_new_name = class_new_name
    self.class_identifier = class_identifier
    self.package_identifier = package_name
    # Parse-walk state flags.
    self.is_package_imported = False
    self.in_selected_package = False
    self.in_selected_class = False
    self.in_some_package = False
    # Move all the tokens in the source code in a buffer, token_stream_rewriter.
    if common_token_stream is None:
        raise TypeError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class="", source_method="", argument_name: str = ""):
    """Create a remove-flag listener to extract and edit the needed pattern.

    Args:
        common_token_stream (CommonTokenStream, optional): default token
            stream passed by higher level api. Defaults to None.
        source_class (str, optional): name of the class which method rests in. Defaults to "".
        source_method (str, optional): name of the method to be edited. Defaults to "".
        argument_name (str, optional): name of the boolean argument which
            branchs the logic. Defaults to "".

    Raises:
        ValueError: raised when no common token stream is given, since it is
            essential to the process
    """
    self.argument_name = argument_name
    self.source_method = source_method
    self.source_class = source_class
    self.token_stream_rewriter_changed = False
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.common_token_stream = common_token_stream
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state flags.
    self.is_source_class = False
    self.is_source_method = False
    self.is_if_block = False
    self.is_else_block = False
def __init__(self, common_token_stream: CommonTokenStream = None,
             package_identifier: str = None, package_new_name: str = None,
             packages_name: list = None):
    """
    Args:
        common_token_stream (CommonTokenStream): An instance of ANTLR4 CommonTokenStream class
        package_identifier(str): Name of the package in which the refactoring has to be done
        package_new_name(str): The new name of the refactored method
        packages_name(str): Name of the packages in which the refactoring has to be done

    Returns:
        RenamePackageRefactoringListener: An instance of RenamePackageRefactoringListener class
    """
    self.token_stream = common_token_stream
    self.package_identifier = package_identifier
    self.package_new_name = package_new_name
    # Fix: the original default `packages_name: list = []` was a mutable
    # default shared across all instances; use None and build a fresh list.
    self.packages_name = [] if packages_name is None else packages_name
    self.is_in_scope = False
    # Move all the tokens in the source code in a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             method_identifier: str = None):
    """Listener state for a method-signature refactoring.

    :param common_token_stream: token stream of the parsed source file
    :param method_identifier: name of the method to transform
    :raises TypeError: if common_token_stream is None
    """
    # Generic counters and walk flags.
    self.i = 0
    self.enter_method = False
    self.token_stream = common_token_stream
    self.method_identifier = method_identifier
    self.class_identifier = ""
    self.currentClass = 1
    self.enter_class = False
    self.method_selected = False
    self.ifelse = False
    self.inputPara = False
    # Generated-code fragments.
    self.newClasses = ""
    self.interface = ""
    self.para = ""
    # Parameter bookkeeping (old vs new signatures).
    self.newPara = []
    self.oldPara = []
    self.typePara = []
    self.old_method_Declaration = ""
    self.new_method_Declaration = ""
    # Move all the tokens in the source code in a buffer, token_stream_rewriter.
    if common_token_stream is None:
        raise TypeError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class=None, field_name: str = None):
    """To implement Increase Field Visibility refactoring based on its actors.
    Detects the required field and increases/changes its visibility status.

    Args:
        common_token_stream (CommonTokenStream): A stream of tokens generated
            by parsing the main file using the ANTLR parser generator
        source_class (str): Name of the class in which the refactoring has to be done
        field_name (str): Name of the field whose visibility status has to be changed

    Returns:
        No returns
    """
    # Empty strings stand in for missing names so comparisons never see None.
    self.field_name = "" if field_name is None else field_name
    self.source_class = "" if source_class is None else source_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state.
    self.is_source_class = False
    self.detected_field = None
    self.detected_method = None
    # Code-generation helpers.
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.tempdeclarationcode = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class=None, propagated_class_name=None):
    """Used for propagation purposes in the other classes of the project:
    detect the objects which have to be propagated.

    Args:
        common_token_stream (CommonTokenStream): A stream of tokens generated
            by parsing the main file using the ANTLR parser generator
        source_class (str): Name of the class in which the propagation has to be implemented
        propagated_class_name (str): Name of the class which has to be propagated

    Returns:
        No returns
    """
    # Missing arguments default to empty lists.
    self.source_class = [] if source_class is None else source_class
    self.propagated_class_name = ([] if propagated_class_name is None
                                  else propagated_class_name)
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state.
    self.is_class = False
    self.current_class = ''
    self.objects = []
def __init__(self, sai, stream):
    """Initialize rewrite state for one token stream.

    :param sai: shared StaticAnalysisInfo, held by reference
    :param stream: token stream of the file being rewritten
    """
    self.sai = sai  # Call by Reference : StaticAnalysisInfo
    self.stream = stream
    self.rewrite = TokenStreamRewriter(stream)
    self.fi = None  # current Function info, filled in while visiting
    # Flags tracking where the walker currently is.
    self.func_flag = False
    self.fbody_flag = False
    self.gpu_func = False
    # Assignment-expression tracking.
    self.equal_flag = False
    self.first_left = False
    self.first_right = False
    self.rw_equal_flag = False
    self.prev_value = None
    # Postfix-operator bookkeeping.
    self.postfix_count = 0
    self.alias_init = None
    self.postfix_call_count = 0
    self.call_list = []
    self.jump_flag = False
    self.selstatcond = False
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class=None, field_name: str = None):
    """
    Args:
        common_token_stream (CommonTokenStream): A stream of tokens generated by parsing the main file using \
        the ANTLR parser generator.
        source_class (str): Name of the class in which the refactoring has to be done.
        field_name (str): Name of the field whose final status has to be changed.

    Returns:
        object (MakeFieldFinalRefactoringListener): An instance of MakeFieldFinalRefactoringListener.
    """
    # Empty strings stand in for missing names so comparisons never see None.
    self.field_name = "" if field_name is None else field_name
    self.source_class = "" if source_class is None else source_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state flags.
    self.is_source_class = False
    self.is_final = False
def __init__(self, common_token_stream: CommonTokenStream,
             method_map: dict, source_class: str, moved_fields: list):
    """Record the inputs needed to rewrite accesses to moved fields.

    :param common_token_stream: token stream of the parsed source file
    :param method_map: dictionary mapping methods of interest (caller-defined shape)
    :param source_class: name of the class the fields are moved from
    :param moved_fields: names of the fields that were moved
    """
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    self.method_map = method_map
    self.source_class = source_class
    self.moved_fields = moved_fields
    self.fields = None  # populated later during the walk
def __init__(self, common_token_stream: CommonTokenStream = None,
             Source_class=None, object_name=None, propagated_class_name=None):
    """Propagation listener state.

    :param common_token_stream: token stream of the parsed source file
    :param Source_class: class(es) the change originates from
    :param object_name: name(s) of the objects to track
    :param propagated_class_name: class(es) the change is propagated to
    :raises ValueError: if common_token_stream is None
    """
    # Missing arguments default to empty lists.
    self.source_class = [] if Source_class is None else Source_class
    self.object_name = [] if object_name is None else object_name
    self.propagated_class_name = ([] if propagated_class_name is None
                                  else propagated_class_name)
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state.
    self.is_class = False
def __init__(self, common_token_stream: CommonTokenStream = None,
             target_class: str = None, target_methods: list = None):
    """Validate the inputs and set up rewriting state.

    :param common_token_stream: token stream of the parsed source file
    :param target_class: class whose methods are targeted
    :param target_methods: non-empty list of method names to process
    :raises ValueError: on a missing stream, class name, or empty method list
    """
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if target_class is None:
        raise ValueError("source_class is None")
    self.target_class = target_class
    if target_methods is None or len(target_methods) == 0:
        raise ValueError("target method must have one method name")
    self.target_methods = target_methods
    # Parse-walk state and code-generation helpers.
    self.is_target_class = False
    self.detected_instance_of_target_class = []
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             class_name: str = None):
    """Set up rewriting state for the given class.

    :param common_token_stream: token stream of the parsed source file
    :param class_name: name of the class this listener operates on
    :raises ValueError: if either argument is None
    """
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if class_name is None:
        raise ValueError("source_class is None")
    self.objective_class = class_name
    # Parse-walk state and code-generation helpers.
    self.is_objective_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
def __init__(self, field_identifier: str, new_field_identifier: str,
             class_identifier: str,
             common_token_stream: CommonTokenStream = None,
             extentions=None, implementations=None):
    """Rename-field listener state.

    :param field_identifier: current name of the field
    :param new_field_identifier: new name for the field
    :param class_identifier: class that declares the field
    :param common_token_stream: token stream of the parsed source file
    :param extentions: extended classes to consider (defaults to a fresh list)
    :param implementations: implemented interfaces to consider (defaults to a fresh list)
    :raises TypeError: if common_token_stream is None
    """
    self.enter_class = False
    self.new_field_identifier = new_field_identifier
    self.class_identifier = class_identifier
    self.field_identifier = field_identifier
    self.scope_handler = ScopeHandler()
    self.symbol_table = SymbolTable()
    self.last_used_type = None
    # Fix: the original `extentions=[]`/`implementations=[]` were mutable
    # defaults shared across all instances; use None and build fresh lists.
    self.extentions = [] if extentions is None else extentions
    self.implementations = [] if implementations is None else implementations
    self.seen_classes = []
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class=None, method_name: str = None):
    """Listener state for a method-level refactoring.

    :param common_token_stream: token stream of the parsed source file
    :param source_class: class that contains the target method
    :param method_name: name of the method to refactor
    :raises ValueError: if common_token_stream is None
    """
    # Empty strings stand in for missing names so comparisons never see None.
    self.method_name = "" if method_name is None else method_name
    self.source_class = "" if source_class is None else source_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Parse-walk state flags.
    self.is_source_class = False
    self.is_final = False
def __init__(self, java_file_path, common_token_stream, scope_class_name,
             target_method_name, new_name, reference=None):
    """The Main listener which parses the file based on the provided
    information, using the ANTLR parser generator and tokenization methods.

    Args:
        java_file_path(str): Address path to the test/source file
        common_token_stream: token stream produced for that file
        scope_class_name(str): Name of the class in which the refactoring has to be done
        target_method_name(str): Name of the method in which the refactoring has to be done
        new_name(str): The new name of the refactored method
        reference: optional usage reference to rewrite as well

    Returns:
        No returns
    """
    self.file_path = java_file_path
    self.reference = reference
    self.token_stream = common_token_stream
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    # Refactoring actors.
    self.class_name = scope_class_name
    self.method_name = target_method_name
    self.new_method_name = new_name
    # Parse-walk state flags.
    self.in_class = False
    self.changed = False
def __init__(self, common_token_stream: CommonTokenStream = None,
             target_class: str = None, target_method: str = None,
             target_parameters: list = None):
    """Validate the refactoring actors and initialize walk state.

    :param common_token_stream: token stream of the parsed source file
    :param target_class: class containing the method to change
    :param target_method: method whose parameters are affected
    :param target_parameters: parameter names of interest (defaults to [])
    :raises ValueError: if the stream, class, or method is missing
    """
    if common_token_stream is None:
        raise ValueError("common token stream is None")
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if target_class is None:
        raise ValueError("target class is None")
    self.target_class = target_class
    if target_method is None:
        raise ValueError("target method is None")
    self.target_method = target_method
    self.target_parameters = [] if target_parameters is None else target_parameters
    # Parse-walk state.
    self.current_class = None
    self.current_method = None
    self.current_method_call = None
    self.target_method_obj = None
    self.removed_expressions = []
    self.local_variables = []
    self.add_to_target_method = []
    self.index_of_parameter = 0
def __init__(self, token_stream_rewriter: CommonTokenStream = None,
             old_class_name: list = None, new_class_name: str = None,
             propagated_class_name: list = None):
    """Rename-class propagation listener state.

    :param token_stream_rewriter: the parsed file's CommonTokenStream
        (despite the name, a stream — it gets wrapped in a rewriter here)
    :param old_class_name: previous class name(s)
    :param new_class_name: replacement class name
    :param propagated_class_name: classes the rename is propagated to
    :raises ValueError: if the token stream is None
    """
    # Missing arguments default to empty lists.
    self.propagated_class_name = ([] if propagated_class_name is None
                                  else propagated_class_name)
    self.new_class_name = [] if new_class_name is None else new_class_name
    self.old_class_name = [] if old_class_name is None else old_class_name
    if token_stream_rewriter is None:
        raise ValueError('token_stream_rewriter is None')
    self.token_stream_rewriter = TokenStreamRewriter(token_stream_rewriter)
    # Parse-walk state and code-generation helpers.
    self.is_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.tempdeclarationcode = ""
    self.method_text = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             target_class: str = None, target_method: str = None,
             target_parameters: list = None):
    """Validate the refactoring actors and initialize walk state.

    :param common_token_stream: token stream of the parsed source file
    :param target_class: class containing the method to change
    :param target_method: method whose parameters are affected
    :param target_parameters: parameter names of interest (defaults to [])
    :raises ValueError: if the stream, class, or method is missing
    """
    if common_token_stream is None:
        raise ValueError("common_token_stream is None")
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if target_class is None:
        raise ValueError("target class is None")
    self.target_class = target_class
    if target_method is None:
        raise ValueError("target method is None")
    self.target_method = target_method
    self.target_parameters = [] if target_parameters is None else target_parameters
    # Parse-walk state and code-generation helpers.
    self.current_class = None
    self.current_method = None
    self.current_method_call = None
    self.target_method_ctx = None
    self.removed_expressions = []
    self.all_local_variable_declarators = []
    self.add_to_top_of_target_method = []
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class: str = None, moved_method=None):
    """Move-method listener state.

    :param common_token_stream: token stream of the parsed source file
    :param source_class: class the methods are moved out of
    :param moved_method: names of the moved methods (defaults to [])
    :raises ValueError: if the stream or source class is missing
    """
    self.moved_methods = [] if moved_method is None else moved_method
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if source_class is None:
        raise ValueError("source_class is None")
    self.source_class = source_class
    # Parse-walk state and code-generation helpers.
    self.is_source_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.tempdeclarationcode = ""
    self.method_text = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             class_identifier: str = None, method_name: str = '',
             new_method_name: str = '', is_static=False,
             extentions=None, implementations=None):
    """Rename-method listener state.

    :param common_token_stream: token stream of the parsed source file
    :param class_identifier: class containing the method; None means every
        class is in scope
    :param method_name: current name of the method
    :param new_method_name: new name for the method
    :param is_static: whether the method is static
    :param extentions: extended classes to consider (defaults to a fresh list)
    :param implementations: implemented interfaces to consider (defaults to a fresh list)
    :raises TypeError: if common_token_stream is None
    """
    # Without a class filter the listener starts "inside" the class scope.
    self.enter_class = False if class_identifier else True
    self.token_stream = common_token_stream
    self.method_name = method_name
    self.new_method_name = new_method_name
    self.class_identifier = class_identifier
    self.scope_handler = ScopeHandler()
    self.symbol_table = SymbolTable()
    self.last_used_type = None
    self.is_static = is_static
    # Fix: the original `extentions=[]`/`implementations=[]` were mutable
    # defaults shared across all instances; use None and build fresh lists.
    self.extentions = [] if extentions is None else extentions
    self.implementations = [] if implementations is None else implementations
    # Abort when the method name equals the class name — presumably this
    # guards against renaming a constructor; TODO confirm.
    self.abort = (self.method_name == self.class_identifier)
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             destination_class: str = None, children_class: list = None,
             moved_methods=None, method_text: str = None):
    """Pull-up-method listener state.

    :param common_token_stream: token stream of the parsed source file
    :param destination_class: parent class the methods are moved into
    :param children_class: child classes the methods come from (defaults to [])
    :param moved_methods: names of the moved methods (defaults to [])
    :param method_text: text of the moved method(s) (defaults to [])
    :raises ValueError: if the stream or destination class is missing
    """
    # Fix: the original wrote `self.mothod_text` (typo) in the None branch,
    # so `self.method_text` was never set when the argument was omitted.
    if method_text is None:
        self.method_text = []
    else:
        self.method_text = method_text
    if moved_methods is None:
        self.moved_methods = []
    else:
        self.moved_methods = moved_methods
    if children_class is None:
        self.children_class = []
    else:
        self.children_class = children_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if destination_class is None:
        # Fix: the error message previously said "source_class is None",
        # which did not match the parameter being validated.
        raise ValueError("destination_class is None")
    else:
        self.destination_class = destination_class
    # Parse-walk state and code-generation helpers.
    self.is_children_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.tempdeclarationcode = ""
def __init__(self, common_token_stream: CommonTokenStream, source_method: str):
    """Wrap the token stream in a rewriter and remember the target method.

    :param common_token_stream: token stream of the parsed source file
    :param source_method: name of the method this listener operates on
    """
    self.source_method = source_method
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
def testInsertBeforeIndexZero(self):
    """Inserting before token index 0 must prepend the text."""
    # Avoid shadowing the builtin `input`; build the stream inline.
    stream = CommonTokenStream(lexer=TestLexer(InputStream('abc')))
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '0')
    # Fix: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(rewriter.getDefaultText(), '0abc')
def testInsertAfterLastIndex(self):
    """Inserting after an index past the last token must append the text."""
    # Avoid shadowing the builtin `input`; build the stream inline.
    stream = CommonTokenStream(lexer=TestLexer(InputStream('abc')))
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertAfter(10, 'x')
    # Fix: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(rewriter.getDefaultText(), 'abcx')
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class: str = None, source_class_data: dict = None,
             target_class: str = None, target_class_data: dict = None,
             is_complete: bool = False):
    """Move-members listener: validates actors and initializes both class models.

    :param common_token_stream: token stream of the parsed source file
    :param source_class: class the members are moved from
    :param source_class_data: fields/methods/constructors of the source class
    :param target_class: class the members are moved to
    :param target_class_data: fields/methods/constructors of the target class
    :param is_complete: whether the move is already complete
    :raises ValueError: if the stream, source class, or target class is missing
    """
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if source_class is None:
        raise ValueError("source_class is None")
    self.source_class = source_class
    if target_class is None:
        raise ValueError("new_class is None")
    self.target_class = target_class
    # Fix: removed a redundant second `self.target_class = target_class`
    # assignment that re-ran under `if target_class:` with no effect.
    # Each class model gets its own fresh dict when none is supplied.
    self.source_class_data = source_class_data or {
        'fields': [], 'methods': [], 'constructors': []}
    self.target_class_data = target_class_data or {
        'fields': [], 'methods': [], 'constructors': []}
    self.field_that_has_source = []
    self.has_source_new = False
    self.is_complete = is_complete
    # Parse-walk state and code-generation helpers.
    self.is_target_class = False
    self.is_source_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
def testReplaceMiddleIndex(self):
    """Replacing the middle token must substitute its text."""
    # Avoid shadowing the builtin `input`; build the stream inline.
    stream = CommonTokenStream(lexer=TestLexer(InputStream('abc')))
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(1, 'x')
    # Fix: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(rewriter.getDefaultText(), 'axc')