def testInsertBeforeIndexZero(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '0')
    self.assertEqual(rewriter.getDefaultText(), '0abc')
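# Note: TestLexer and TestLexer2 are assumed here to be ANTLR-generated lexers
# over the small test grammars used by the runtime's rewriter test suite; they
# are not defined anywhere in this section.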
def testInsertAfterLastIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # An index past the last token is tolerated: the text is appended at the end.
    rewriter.insertAfter(10, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'abcx')
def testReplaceSubsetThenFetch(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    self.assertEqual('abxyzba', rewriter.getDefaultText())
def testReplaceMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(1, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'axc')
def testReplaceAll(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 6, 'x')
    self.assertEqual('x', rewriter.getDefaultText())
def testCombineInserts(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, 'x')
    rewriter.insertBeforeIndex(0, 'y')
    self.assertEqual('yxabc', rewriter.getDefaultText())
def test2InsertMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'x')
    rewriter.insertBeforeIndex(1, 'y')
    self.assertEqual('ayxbc', rewriter.getDefaultText())
def testDropIdenticalReplace(self):
    input = InputStream('abcc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afooc', rewriter.getDefaultText())
def testOverlappingReplace4(self):
    input = InputStream('abcc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 3, 'bar')
    self.assertEqual('abar', rewriter.getDefaultText())
def testToStringStartStop(self):
    input = InputStream('x = 3 * 0;')
    lexer = TestLexer2(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual(rewriter.getDefaultText(), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 0, 9), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 4, 8), '0')
def testPreservesOrderOfContiguousInserts(self):
    """
    Test for fix for: https://github.com/antlr/antlr4/issues/550
    """
    input = InputStream('aa')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '<b>')
    rewriter.insertAfter(0, '</b>')
    rewriter.insertBeforeIndex(1, '<b>')
    rewriter.insertAfter(1, '</b>')
    self.assertEqual('<b>a</b><b>a</b>', rewriter.getDefaultText())
def testOverlappingReplace2(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 3, 'bar')
    rewriter.replaceRange(1, 2, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:"foo"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:2='<EOF>',<-1>,1:3]:"bar">""",
        str(ctx.exception)
    )
def testReplaceThenReplaceLowerIndexedSuperset(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    rewriter.replaceRange(1, 3, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:"foo"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:"xyz">""",
        msg
    )
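# The token display format in these error messages is ANTLR's standard
# [@index,start:stop='text',<type>,line:column] notation, so e.g.
# [@1,1:1='b',<2>,1:1] is token index 1, covering characters 1..1 ('b'),
# token type 2, at line 1 column 1.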
class MakeConcreteClassRefactoringListener(JavaParserLabeledListener):
    """
    To implement the Make Class Concrete refactoring based on its actors.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 class_name: str = None):
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        if class_name is None:
            raise ValueError("class_name is None")
        else:
            self.objective_class = class_name
        self.is_objective_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterTypeDeclaration(self, ctx: JavaParserLabeled.TypeDeclarationContext):
        if self.objective_class == ctx.classDeclaration().IDENTIFIER().getText():
            # Delete every 'abstract' modifier on the class declaration.
            for i in range(len(ctx.classOrInterfaceModifier())):
                if ctx.classOrInterfaceModifier(i).getText() == "abstract":
                    self.token_stream_rewriter.replaceRange(
                        from_idx=ctx.classOrInterfaceModifier(i).start.tokenIndex,
                        to_idx=ctx.classOrInterfaceModifier(i).stop.tokenIndex,
                        text="")
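# The refactoring listeners in this section all rely on the same ANTLR driving
# pattern, which the excerpts never show. A minimal sketch, assuming the
# generated JavaLexer / JavaParserLabeled modules these classes are written
# against and a `compilationUnit` entry rule (both assumptions, not shown here):
from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker

def apply_refactoring(java_file, make_listener):
    stream = FileStream(java_file, encoding='utf8')
    lexer = JavaLexer(stream)              # assumed generated lexer
    tokens = CommonTokenStream(lexer)
    parser = JavaParserLabeled(tokens)     # assumed generated parser
    tree = parser.compilationUnit()        # assumed entry rule
    # e.g. make_listener = lambda t: MakeConcreteClassRefactoringListener(t, 'Foo')
    listener = make_listener(tokens)
    ParseTreeWalker.DEFAULT.walk(listener, tree)
    # Every listener accumulates its edits in a TokenStreamRewriter.
    return listener.token_stream_rewriter.getDefaultText()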
def __init__(self, common_token_stream: CommonTokenStream = None,
             scope_class_name: str = None,
             field_identifier: str = None,
             field_new_name: str = None):
    self.token_stream = common_token_stream
    self.scope_class_name = scope_class_name
    self.field_identifier = field_identifier
    self.field_new_name = field_new_name
    self.is_in_scope = False
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class=None, field_name: str = None):
    if field_name is None:
        self.field_name = ""
    else:
        self.field_name = field_name
    if source_class is None:
        self.source_class = ""
    else:
        self.source_class = source_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    self.is_source_class = False
    self.is_static = False
def __init__(self, common_token_stream: CommonTokenStream = None,
             target_class: str = None):
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.codeRewrite = TokenStreamRewriter(common_token_stream)
    if target_class is None:
        raise ValueError("target_class is None")
    else:
        self.target_class = target_class
    self.is_target_class = False
    self.have_constructor = False
    self.new_factory_function = False
    self.new_parameters = []
    self.new_parameters_names = []
def __init__(
        self,
        common_token_stream: CommonTokenStream = None,
        source_class: str = None,
        source_class_data: dict = None,
        target_class: str = None,
        target_class_data: dict = None,
        is_complete: bool = False):
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if source_class is None:
        raise ValueError("source_class is None")
    else:
        self.source_class = source_class
    if target_class is None:
        raise ValueError("target_class is None")
    else:
        self.target_class = target_class
    if source_class_data:
        self.source_class_data = source_class_data
    else:
        self.source_class_data = {'fields': [], 'methods': [], 'constructors': []}
    if target_class_data:
        self.target_class_data = target_class_data
    else:
        self.target_class_data = {'fields': [], 'methods': [], 'constructors': []}
    self.field_that_has_source = []
    self.has_source_new = False
    self.is_complete = is_complete
    self.is_target_class = False
    self.is_source_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             class_identifier: str = None, source_package: str = None,
             target_package: str = None, filename: str = None,
             has_import: bool = None):
    """
    :param common_token_stream:
    """
    self.token_stream = common_token_stream
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
    if class_identifier is not None:
        self.class_identifier = class_identifier
    else:
        raise ValueError("class_identifier is None")
    if filename is not None:
        self.filename = filename
    else:
        raise ValueError("filename is None")
    if has_import is not None:
        self.has_import = has_import
    else:
        raise ValueError("has_import is None")
    if source_package is not None:
        self.source_package = source_package
    else:
        raise ValueError("source_package is None")
    if target_package is not None:
        self.target_package = target_package
    else:
        raise ValueError("target_package is None")
    self.need_import = False
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.mul_imports = []
    self.exact_imports = []
class MakeAbstractClassRefactoringListener(JavaParserLabeledListener):
    """
    To implement the Make Class Abstract refactoring based on its actors.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 class_name: str = None):
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        if class_name is None:
            raise ValueError("class_name is None")
        else:
            self.objective_class = class_name
        self.is_objective_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        if self.objective_class == ctx.IDENTIFIER().getText():
            # Prepend 'abstract' by rewriting the first token as
            # 'abstract class' (assumes the class declaration opens the file).
            self.token_stream_rewriter.replaceRange(
                from_idx=0,
                to_idx=0,
                text="abstract " + ctx.CLASS().getText())
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class: str = None, children_class=None,
             moved_fields=None, fieldtext=None):
    if moved_fields is None:
        self.moved_fields = []
    else:
        self.moved_fields = moved_fields
    if children_class is None:
        self.children_class = []
    else:
        self.children_class = children_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if source_class is None:
        raise ValueError("source_class is None")
    else:
        self.source_class = source_class
    if fieldtext is None:
        raise ValueError("fieldtext is None")
    else:
        self.fieldtext = fieldtext
    # if destination_class is None:
    #     raise ValueError("destination_class is None")
    # else:
    #     self.destination_class = destination_class
    self.is_source_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.tempdeclarationcode = ""
    self.field_text = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             source_class: str = None, new_class: str = None,
             moved_fields=None, moved_methods=None, output_path: str = ""):
    if moved_methods is None:
        self.moved_methods = []
    else:
        self.moved_methods = moved_methods
    if moved_fields is None:
        self.moved_fields = []
    else:
        self.moved_fields = moved_fields
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if source_class is None:
        raise ValueError("source_class is None")
    else:
        self.source_class = source_class
    if new_class is None:
        raise ValueError("new_class is None")
    else:
        self.new_class = new_class
    self.output_path = output_path
    self.is_source_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.scope = []
    self.aul = AllUsageList()
import os

def save(rewriter: TokenStreamRewriter, file_name: str,
         filename_mapping=lambda x: x + ".rewritten.java"):
    """Write the rewriter's default-program text to a mapped output path."""
    new_filename = filename_mapping(file_name).replace("\\", "/")
    path = new_filename[:new_filename.rfind('/')]
    if not os.path.exists(path):
        os.makedirs(path)
    with open(new_filename, mode='w', newline='') as file:
        print("writing:", new_filename)
        file.write(rewriter.getDefaultText())
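# A hedged usage sketch for save(); `listener` and the path are illustrative,
# not part of the original code:
#
#   save(listener.token_stream_rewriter, "src/Main.java")
#   # -> writes the rewritten source to src/Main.java.rewritten.java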
def main(_):
    subject = """<?php
$heredocString = <<<TXT
HEREDOC TEXT
TXT;
$heredocString = <<<TXT
HEREDOC TEXT
TXT
;
"""
    lexer = PhpLexer(InputStream(subject))
    stream = CommonTokenStream(lexer)
    writer = TokenStreamRewriter(stream)
    sys.stdout.write(
        writer.getText(TokenStreamRewriter.DEFAULT_PROGRAM_NAME, (0, 100)))
def __init__(self, common_token_stream: CommonTokenStream, source_class: str,
             child_class: str, class_name: str, method_name: str,
             ref_line: int, target_package: str):
    self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    self.source_class = source_class
    self.child_class = child_class
    self.class_name = class_name
    self.method_name = method_name
    self.ref_line = ref_line
    self.target_package = target_package
    self.start = None
    self.stop = None
    self.is_safe = False
    self.need_cast = False
    self.variable = None
    self.detected_class = False
    self.detected_package = False
    self.import_end = None
def __init__(self, common_token_stream: CommonTokenStream = None,
             package_identifier: str = None,
             package_new_name: str = None,
             packages_name: list = None):
    """
    :param common_token_stream:
    """
    self.token_stream = common_token_stream
    self.package_identifier = package_identifier
    self.package_new_name = package_new_name
    # Avoid a mutable default argument: fall back to a fresh list.
    self.packages_name = packages_name if packages_name is not None else []
    self.is_in_scope = False
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             destination_class: str = None,
             children_class: list = None,
             moved_methods=None, method_text: str = None):
    if method_text is None:
        self.method_text = []
    else:
        self.method_text = method_text
    if moved_methods is None:
        self.moved_methods = []
    else:
        self.moved_methods = moved_methods
    if children_class is None:
        self.children_class = []
    else:
        self.children_class = children_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if destination_class is None:
        raise ValueError("destination_class is None")
    else:
        self.destination_class = destination_class
    self.is_children_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.code = ""
    self.tempdeclarationcode = ""
class RenamePackageRefactoringListener(JavaParserLabeledListener):
    def __init__(self, common_token_stream: CommonTokenStream = None,
                 package_identifier: str = None,
                 package_new_name: str = None,
                 packages_name: list = None):
        """
        :param common_token_stream:
        """
        self.token_stream = common_token_stream
        self.package_identifier = package_identifier
        self.package_new_name = package_new_name
        # Avoid a mutable default argument: fall back to a fresh list.
        self.packages_name = packages_name if packages_name is not None else []
        self.is_in_scope = False
        # Move all the tokens in the source code into a buffer, token_stream_rewriter.
        if common_token_stream is not None:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        else:
            raise TypeError('common_token_stream is None')

    def enterPackageDeclaration(self, ctx: JavaParserLabeled.PackageDeclarationContext):
        if self.package_identifier == ctx.qualifiedName().IDENTIFIER()[-1].getText():
            if self.package_new_name not in self.packages_name:
                # Identifiers and dots alternate in a qualified name, so the
                # last identifier sits 2 * (n - 1) tokens after the start.
                self.token_stream_rewriter.replaceIndex(
                    index=ctx.qualifiedName().start.tokenIndex
                          + (2 * len(ctx.qualifiedName().IDENTIFIER()) - 2),
                    text=self.package_new_name)
                print("package changed")

    def enterImportDeclaration(self, ctx: JavaParserLabeled.ImportDeclarationContext):
        if ctx.qualifiedName().IDENTIFIER()[-1].getText() == self.package_identifier:
            if self.package_new_name not in self.packages_name:
                self.token_stream_rewriter.replaceIndex(
                    index=ctx.qualifiedName().start.tokenIndex
                          + (2 * len(ctx.qualifiedName().IDENTIFIER()) - 2),
                    text=self.package_new_name)
                print("package name in import changed")
class RemoveClassRefactoringListener(JavaParserLabeledListener):
    """
    To implement the remove class refactoring: deletes the declaration of the
    target class from the compilation unit.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 class_name: str = None):
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        if class_name is None:
            raise ValueError("class_name is None")
        else:
            self.objective_class = class_name
        self.is_objective_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        class_identifier = ctx.IDENTIFIER().getText()
        ctxparent = ctx.parentCtx
        if self.objective_class == class_identifier:
            # Delete the whole type declaration, including its modifiers.
            start_index = ctxparent.start.tokenIndex
            stop_index = ctxparent.stop.tokenIndex
            self.token_stream_rewriter.delete(
                program_name=self.token_stream_rewriter.DEFAULT_PROGRAM_NAME,
                from_idx=start_index,
                to_idx=stop_index)
            self.detected_method = None
def __init__(self, common_token_stream: CommonTokenStream = None,
             child_class=None):
    if child_class is None:
        self.child_class = []
    else:
        self.child_class = child_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    self.is_children_class = False
    self.detected_field = None
    self.detected_method = None
    self.TAB = "\t"
    self.NEW_LINE = "\n"
    self.methodcode = ""
def __init__(self, common_token_stream: CommonTokenStream,
             child=None, field=None):
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    if child is None:
        raise ValueError("child is None")
    else:
        self.child = child
    if field is None:
        raise ValueError("field is None")
    else:
        self.field = field
    self.field_text = ""
def __init__(self, common_token_stream: CommonTokenStream = None,
             SuperClass_identifier: str = None,
             SubClass_identifier: list = None):
    """
    :param common_token_stream:
    """
    self.enter_class = False
    self.token_stream = common_token_stream
    self.SuperClass_identifier = SuperClass_identifier
    self.SubClass_identifier = SubClass_identifier
    self.InSuperClass = False
    self.InSubClass = False
    self.InMainClass = False
    self.CurrentCC = None
    self.Visitors = {}
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             parent_class=None, child_class=None,
             field_text: str = None, method_text: str = None):
    if method_text is None:
        self.method_text = []
    else:
        self.method_text = method_text
    if field_text is None:
        self.field_text = []
    else:
        self.field_text = field_text
    # parent_class is required; the original also defaulted it to [] in a
    # branch that the later None check made unreachable.
    if parent_class is None:
        raise ValueError("parent_class is None")
    else:
        self.parent_class = parent_class
    if child_class is None:
        self.child_class = []
    else:
        self.child_class = child_class
    if common_token_stream is None:
        raise ValueError('common_token_stream is None')
    else:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    self.is_parent_class = False
    self.is_child_class = False
    self.detected_field = None
    self.detected_method = None
def __init__(self, common_token_stream: CommonTokenStream = None,
             package_name: str = None,
             scope_class_name: str = None,
             method_identifier: str = None,
             method_new_name: str = None):
    """
    Initializer of the rename method refactoring listener.

    Args:
        common_token_stream (CommonTokenStream): An instance of the ANTLR4 CommonTokenStream class
        package_name (str): Name of the package in which the refactoring has to be done
        scope_class_name (str): Name of the class in which the refactoring has to be done
        method_identifier (str): Name of the method to be refactored
        method_new_name (str): The new name of the refactored method

    Returns:
        RenameMethodListener: An instance of the RenameMethodListener class
    """
    self.token_stream = common_token_stream
    self.class_identifier = scope_class_name
    self.method_identifier = method_identifier
    self.method_new_name = method_new_name
    self.package_identifier = package_name
    self.is_package_imported = False
    self.in_class = False
    self.in_selected_package = False
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None,
             package_name: str = None,
             source_class_name: str = None,
             field_identifier: str = None):
    """
    Args:
        common_token_stream (CommonTokenStream): contains the program tokens
        package_name (str): The enclosing package of the field
        source_class_name (str): The enclosing class of the field
        field_identifier (str): The field name to be encapsulated

    Returns:
        An instance of EncapsulateFieldRefactoringListener
    """
    self.token_stream = common_token_stream
    if package_name is None:
        self.package_name = ''
    else:
        self.package_name = package_name
    self.source_class_name = source_class_name
    self.field_identifier = field_identifier
    self.getter_exist = False
    self.setter_exist = False
    self.in_source_class = False
    self.in_selected_package = self.package_name == ''
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
def __init__(self, common_token_stream: CommonTokenStream = None):
    """
    :param common_token_stream:
    """
    self.token_stream = common_token_stream
    # Move all the tokens in the source code into a buffer, token_stream_rewriter.
    if common_token_stream is not None:
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
    else:
        raise TypeError('common_token_stream is None')
class DeleteSourceListener(JavaParserLabeledListener):
    """
    Removes the declaration of a given method from the source.
    """

    def __init__(self, common_token_stream: CommonTokenStream, source_method: str):
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        self.source_method = source_method

    def enterMethodDeclaration(self, ctx: JavaParserLabeled.MethodDeclarationContext):
        if self.source_method == ctx.IDENTIFIER().getText():
            # Blank out the whole declaration; two context levels up covers
            # the method's modifiers as well.
            self.token_stream_rewriter.replaceRange(
                from_idx=ctx.parentCtx.parentCtx.start.tokenIndex,
                to_idx=ctx.parentCtx.parentCtx.stop.tokenIndex,
                text="")
def enterMethodDeclaration(self, ctx: JavaParserLabeled.MethodDeclarationContext):
    """Check whether this is the intended method; if so, capture its signature
    and remove the boolean argument.

    Args:
        ctx (JavaParserLabeled.MethodDeclarationContext):
    """
    self.is_source_method = (ctx.IDENTIFIER().getText() == self.source_method)
    if self.is_source_method:
        nextParam = None
        params = ctx.formalParameters().formalParameterList().formalParameter()
        for idx, formalParameter in enumerate(params):
            if formalParameter.variableDeclaratorId().IDENTIFIER().getText() == self.argument_name:
                self.argument_token = formalParameter
                # Remember the parameter that follows, if any, so the
                # separating comma can be removed along with the argument.
                nextParam = params[idx + 1] if idx != len(params) - 1 else None
                break
        if nextParam:
            self.token_stream_rewriter.replaceRange(
                self.argument_token.start.tokenIndex,
                nextParam.start.tokenIndex - 1,
                '')
        else:
            self.token_stream_rewriter.replaceRange(
                self.argument_token.start.tokenIndex,
                self.argument_token.stop.tokenIndex,
                '')
        self.signature = self.token_stream_rewriter.getText(
            self.token_stream_rewriter.DEFAULT_PROGRAM_NAME,
            ctx.start.tokenIndex,
            ctx.methodBody().start.tokenIndex)
        if not self.token_stream_rewriter_changed:
            self.token_stream_rewriter = TokenStreamRewriter(self.common_token_stream)
            self.token_stream_rewriter_changed = True
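# A hedged illustration (hypothetical Java input) of the removal above, with
# argument_name == 'verbose':
#
#   before: void log(boolean verbose, String msg) { ... }
#   after : void log(String msg) { ... }
#
# With a following parameter, the replaceRange spans from the flag's first
# token up to the token just before the next parameter, so the separating
# comma goes too; when the flag is the only parameter, just its own tokens
# are removed, leaving empty parentheses.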
def __init__(self, common_token_stream: CommonTokenStream = None, source_class=None, field_name: str = None): """ To implement ِDecrease Field Visibility refactoring based on its actors. Detects the required field and decreases/changes its visibility status. Args: common_token_stream (CommonTokenStream): A stream of tokens generated by parsing the main file using the ANTLR parser generator source_class (str): Name of the class in which the refactoring has to be done field_name (str): Name of the field whose visibility status has to be changed Returns: No returns """ if field_name is None: self.field_name = "" else: self.field_name = field_name if source_class is None: self.source_class = "" else: self.source_class = source_class if common_token_stream is None: raise ValueError('common_token_stream is None') else: self.token_stream_rewriter = TokenStreamRewriter( common_token_stream) self.is_source_class = False self.detected_field = None self.detected_method = None self.TAB = "\t" self.NEW_LINE = "\n" self.code = "" self.tempdeclarationcode = ""
class GetMethodTextPullUpMethodRefactoringListener(JavaParserLabeledListener):
    def __init__(self, common_token_stream: CommonTokenStream = None,
                 child_class=None, moved_methods=None):
        if moved_methods is None:
            self.moved_methods = []
        else:
            self.moved_methods = moved_methods
        if child_class is None:
            self.children_class = []
        else:
            self.children_class = child_class
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        self.is_children_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""
        self.tempdeclarationcode = ""
        self.method_text = ""

    def enterMethodDeclaration(self, ctx: JavaParserLabeled.MethodDeclarationContext):
        if self.is_children_class:
            method_identifier = ctx.IDENTIFIER().getText()
            if method_identifier in self.moved_methods:
                # Capture the full text of the method declaration, including
                # its modifiers (two context levels up).
                methodDefctx = ctx.parentCtx.parentCtx
                start_index = methodDefctx.start.tokenIndex
                stop_index = methodDefctx.stop.tokenIndex
                self.method_text = self.token_stream_rewriter.getText(
                    program_name=self.token_stream_rewriter.DEFAULT_PROGRAM_NAME,
                    start=start_index,
                    stop=stop_index)

    def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        class_identifier = ctx.IDENTIFIER().getText()
        self.is_children_class = class_identifier in self.children_class
def testDropPrevCoveredInsert(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
def testReplaceThenInsertAfterLastIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(2, 'x')
    rewriter.insertAfter(2, 'y')
    self.assertEqual('abxy', rewriter.getDefaultText())
def testInsertThenReplaceSameIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '0')
    rewriter.replaceIndex(0, 'x')
    self.assertEqual('0xbc', rewriter.getDefaultText())
def testLeaveAloneDisjointInsert2(self):
    input = InputStream('abcc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 3, 'foo')
    rewriter.insertBeforeIndex(1, 'x')
    self.assertEqual('axbfoo', rewriter.getDefaultText())
def testInsertBeforeTokenThenDeleteThatToken(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
def testReplaceRangeThenInsertAfterRightEdge(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertAfter(4, 'y')
    self.assertEqual('abxyba', rewriter.getDefaultText())
def testReplaceSingleMiddleThenOverlappingSuperset(self):
    input = InputStream('abcba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(2, 'xyz')
    rewriter.replaceRange(0, 3, 'foo')
    self.assertEqual('fooa', rewriter.getDefaultText())
def testCombineInsertOnLeftWithDelete(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.delete('default', 0, 2)
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('z', rewriter.getDefaultText())
def testCombineInsertOnLeftWithReplace(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'foo')
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('zfoo', rewriter.getDefaultText())
def test2ReplaceMiddleIndex1InsertBefore(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, "_")
    rewriter.replaceIndex(1, 'x')
    rewriter.replaceIndex(1, 'y')
    self.assertEqual('_ayc', rewriter.getDefaultText())
def testDisjointInserts(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'x')
    rewriter.insertBeforeIndex(2, 'y')
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('zaxbyc', rewriter.getDefaultText())
def testReplaceThenDeleteMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'x')
    rewriter.insertBeforeIndex(1, '0')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        'insert op <InsertBeforeOp@[@1,1:1=\'b\',<2>,1:1]:"0"> within boundaries of previous <ReplaceOp@[@0,0:0=\'a\',<1>,1:0]..[@2,2:2=\'c\',<3>,1:2]:"x">',
        str(ctx.exception)
    )
def testReplaceRangeThenInsertAtRightEdge(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertBeforeIndex(4, 'y')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        "insert op <InsertBeforeOp@[@4,4:4='c',<3>,1:4]:\"y\"> within boundaries of previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"x\">",
        msg
    )
def testToStringStartStop2(self):
    input = InputStream('x = 3 * 0 + 2 * 0;')
    lexer = TestLexer2(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    self.assertEqual('x = 3 * 0 + 2 * 0;', rewriter.getDefaultText())

    # replace 3 * 0 with 0
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual('x = 0 + 2 * 0;', rewriter.getDefaultText())
    self.assertEqual('x = 0 + 2 * 0;', rewriter.getText('default', 0, 17))
    self.assertEqual('0', rewriter.getText('default', 4, 8))
    self.assertEqual('x = 0', rewriter.getText('default', 0, 8))
    self.assertEqual('2 * 0', rewriter.getText('default', 12, 16))

    rewriter.insertAfter(17, "// comment")
    self.assertEqual('2 * 0;// comment', rewriter.getText('default', 12, 18))
    self.assertEqual('x = 0', rewriter.getText('default', 0, 8))
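# Note: the start/stop arguments of getText() are inclusive *token* indices,
# not character offsets, and the returned text reflects pending edits whose
# anchor tokens fall inside the requested range (hence '// comment' above).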