class MakeMethodStaticRefactoringListener(JavaParserLabeledListener):
    """
    To implement the Make Method Static refactoring based on its actors:
    adds the `static` modifier to the target method of the source class.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 source_class=None, method_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param source_class: name of the class that owns the target method.
        :param method_name: name of the method to make static.
        """
        if method_name is None:
            self.method_name = ""
        else:
            self.method_name = method_name
        if source_class is None:
            self.source_class = ""
        else:
            self.source_class = source_class
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        self.is_source_class = False
        self.is_static = False

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        # Track whether the walker is currently inside the source class.
        class_identifier = ctx.IDENTIFIER().getText()
        self.is_source_class = class_identifier == self.source_class

    def exitMethodDeclaration(self,
                              ctx: JavaParserLabeled.MethodDeclarationContext):
        if not self.is_source_class:
            return None
        grand_parent_ctx = ctx.parentCtx.parentCtx
        method_identifier = ctx.IDENTIFIER().getText()
        # NOTE(review): substring match kept for backward compatibility;
        # an exact `==` comparison is probably what was intended.
        if self.method_name in method_identifier:
            if grand_parent_ctx.modifier() == []:
                # No modifiers at all: prepend `static` to the return type.
                self.token_stream_rewriter.replaceRange(
                    from_idx=ctx.typeTypeOrVoid().start.tokenIndex,
                    to_idx=ctx.typeTypeOrVoid().stop.tokenIndex,
                    text='static ' + ctx.typeTypeOrVoid().getText())
            else:
                # Fix: use a per-method local flag. The original reused the
                # instance-wide `self.is_static`, which is never reset, so
                # after one static method every later target method was
                # (wrongly) left untouched.
                already_static = False
                for i in range(len(grand_parent_ctx.modifier())):
                    if grand_parent_ctx.modifier(i).getText() == "static":
                        already_static = True
                        self.is_static = True  # keep public attribute in sync
                        break
                if not already_static:
                    self.token_stream_rewriter.replaceRange(
                        from_idx=grand_parent_ctx.modifier(0).start.tokenIndex,
                        to_idx=grand_parent_ctx.modifier(0).stop.tokenIndex,
                        text=grand_parent_ctx.modifier(0).getText() + ' static')
class MakeMethodNonFinalRefactoringListener(JavaParserLabeledListener):
    """
    To implement Make Method Non-Final refactoring based on its actors:
    removes the `final` modifier from the target method of the source class.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 source_class=None, method_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param source_class: name of the class that owns the target method.
        :param method_name: name of the method to make non-final.
        """
        if method_name is None:
            self.method_name = ""
        else:
            self.method_name = method_name
        if source_class is None:
            self.source_class = ""
        else:
            self.source_class = source_class
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        self.is_source_class = False
        self.is_final = False

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        # Track whether the walker is currently inside the source class.
        class_identifier = ctx.IDENTIFIER().getText()
        self.is_source_class = class_identifier == self.source_class

    def exitMethodDeclaration(self,
                              ctx: JavaParserLabeled.MethodDeclarationContext):
        if not self.is_source_class:
            return None
        grand_parent_ctx = ctx.parentCtx.parentCtx
        method_identifier = ctx.IDENTIFIER().getText()
        # NOTE(review): substring match kept for backward compatibility.
        if self.method_name in method_identifier:
            for i in range(len(grand_parent_ctx.modifier())):
                if grand_parent_ctx.modifier(i).getText() == "final":
                    self.is_final = True  # keep public attribute in sync
                    # Fix: erase the modifier while `i` still indexes the
                    # matched `final` token. The original tested a stale
                    # instance-wide flag after the loop, so a leftover loop
                    # index could delete the wrong modifier on later methods.
                    self.token_stream_rewriter.replaceRange(
                        from_idx=grand_parent_ctx.modifier(i).start.tokenIndex,
                        to_idx=grand_parent_ctx.modifier(i).stop.tokenIndex,
                        text='')
                    break
class MakeFieldNonStaticRefactoringListener(JavaParserLabeledListener):
    """
    To implement Make static field non-static refactoring operation based on
    its actors: removes the `static` modifier from the target field.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 source_class=None, field_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param source_class: name of the class that owns the target field.
        :param field_name: name of the field to make non-static.
        """
        if field_name is None:
            self.field_name = ""
        else:
            self.field_name = field_name
        if source_class is None:
            self.source_class = ""
        else:
            self.source_class = source_class
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        self.is_source_class = False
        self.is_static = False

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        # Track whether the walker is currently inside the source class.
        class_identifier = ctx.IDENTIFIER().getText()
        self.is_source_class = class_identifier == self.source_class

    def exitFieldDeclaration(self,
                             ctx: JavaParserLabeled.FieldDeclarationContext):
        if not self.is_source_class:
            return None
        grand_parent_ctx = ctx.parentCtx.parentCtx
        # Only the first declarator's identifier is inspected, as in the
        # original (multi-declarator fields beyond index 0 are ignored).
        field_identifier = ctx.variableDeclarators().variableDeclarator(
            0).variableDeclaratorId().IDENTIFIER().getText()
        # NOTE(review): substring match kept for backward compatibility.
        if self.field_name in field_identifier:
            for i in range(len(grand_parent_ctx.modifier())):
                if grand_parent_ctx.modifier(i).getText() == "static":
                    self.is_static = True  # keep public attribute in sync
                    # Fix: erase the modifier while `i` still indexes the
                    # matched `static` token. The original tested a stale
                    # instance-wide flag after the loop (pre-seeding `i = 0`
                    # to avoid a NameError), which could delete the wrong
                    # modifier on later fields.
                    self.token_stream_rewriter.replaceRange(
                        from_idx=grand_parent_ctx.modifier(i).start.tokenIndex,
                        to_idx=grand_parent_ctx.modifier(i).stop.tokenIndex,
                        text='')
                    break
def testReplaceAll(self):
    """Replacing the entire token range renders only the replacement text."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 6, 'x')
    self.assertEqual('x', rewriter.getDefaultText())
def testReplaceSubsetThenFetch(self):
    """Replacing an interior range keeps the surrounding tokens intact."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    self.assertEqual('abxyzba', rewriter.getDefaultText())
def testReplaceAll(self):
    """Replacing the entire token range renders only the replacement text."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 6, 'x')
    self.assertEqual('x', rewriter.getDefaultText())
def testReplaceSubsetThenFetch(self):
    """Replacing an interior range keeps the surrounding tokens intact."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    self.assertEqual('abxyzba', rewriter.getDefaultText())
def testDropPrevCoveredInsert(self):
    """An insert covered by a later replace over the same range is dropped."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
def testReplaceRangeThenInsertAfterRightEdge(self):
    """An insert just after a replaced range lands after the replacement."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertAfter(4, 'y')
    self.assertEqual('abxyba', rewriter.getDefaultText())
def testLeaveAloneDisjointInsert2(self):
    """An insert disjoint from an earlier replace is applied independently."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 3, 'foo')
    rewriter.insertBeforeIndex(1, 'x')
    self.assertEqual('axbfoo', rewriter.getDefaultText())
def testCombineInsertOnLeftWithReplace(self):
    """An insert at the left edge of a replace is merged before it."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'foo')
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('zfoo', rewriter.getDefaultText())
def testReplaceSingleMiddleThenOverlappingSuperset(self):
    """A superset replace subsumes an earlier single-token replace."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(2, 'xyz')
    rewriter.replaceRange(0, 3, 'foo')
    self.assertEqual('fooa', rewriter.getDefaultText())
def testReplaceRangeThenInsertAfterRightEdge(self):
    """An insert just after a replaced range lands after the replacement."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertAfter(4, 'y')
    self.assertEqual('abxyba', rewriter.getDefaultText())
def testCombineInsertOnLeftWithReplace(self):
    """An insert at the left edge of a replace is merged before it."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'foo')
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('zfoo', rewriter.getDefaultText())
def testOverlappingReplace4(self):
    """A later superset replace wins over an earlier overlapped one."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 3, 'bar')
    self.assertEqual('abar', rewriter.getDefaultText())
def testDropPrevCoveredInsert(self):
    """An insert covered by a later replace over the same range is dropped."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
def testDropIdenticalReplace(self):
    """Two identical replaces over the same range apply once."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afooc', rewriter.getDefaultText())
def testInsertBeforeTokenThenDeleteThatToken(self):
    """Insert before a token, then replace over it: insert is kept."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
def testLeaveAloneDisjointInsert2(self):
    """An insert disjoint from an earlier replace is applied independently."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 3, 'foo')
    rewriter.insertBeforeIndex(1, 'x')
    self.assertEqual('axbfoo', rewriter.getDefaultText())
def testReplaceSingleMiddleThenOverlappingSuperset(self):
    """A superset replace subsumes an earlier single-token replace."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(2, 'xyz')
    rewriter.replaceRange(0, 3, 'foo')
    self.assertEqual('fooa', rewriter.getDefaultText())
def testInsertBeforeTokenThenDeleteThatToken(self):
    """Insert before a token, then replace over it: insert is kept."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
def testDropIdenticalReplace(self):
    """Two identical replaces over the same range apply once."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afooc', rewriter.getDefaultText())
def testOverlappingReplace4(self):
    """A later superset replace wins over an earlier overlapped one."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 3, 'bar')
    self.assertEqual('abar', rewriter.getDefaultText())
def testToStringStartStop(self):
    """getText over sub-ranges reflects the replace of '3 * 0' with '0'."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('x = 3 * 0;')
    lexer = TestLexer2(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual(rewriter.getDefaultText(), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 0, 9), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 4, 8), '0')
def testToStringStartStop(self):
    """getText over sub-ranges reflects the replace of '3 * 0' with '0'."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('x = 3 * 0;')
    lexer = TestLexer2(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual(rewriter.getDefaultText(), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 0, 9), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 4, 8), '0')
class DeleteSourceListener(JavaParserLabeledListener):
    """Erases the declaration of a named method from the token stream."""

    def __init__(self, common_token_stream: CommonTokenStream,
                 source_method: str):
        # Rewriter buffers all edits; source_method is the target identifier.
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        self.source_method = source_method

    def enterMethodDeclaration(
            self, ctx: JavaParserLabeled.MethodDeclarationContext):
        # Only the method whose identifier matches the configured name goes.
        if ctx.IDENTIFIER().getText() != self.source_method:
            return
        # The grandparent context spans the whole member declaration,
        # modifiers included, so the entire method is blanked out.
        declaration_ctx = ctx.parentCtx.parentCtx
        self.token_stream_rewriter.replaceRange(
            from_idx=declaration_ctx.start.tokenIndex,
            to_idx=declaration_ctx.stop.tokenIndex,
            text="")
def testReplaceThenDeleteMiddleIndex(self):
    """An insert inside a replaced range must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'x')
    rewriter.insertBeforeIndex(1, '0')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    # Python 3 exceptions have no `.message`; compare str(exception) instead.
    # Also: `assertEquals` is a deprecated alias; use `assertEqual`.
    self.assertEqual(
        'insert op <InsertBeforeOp@[@1,1:1=\'b\',<2>,1:1]:"0"> within boundaries of previous <ReplaceOp@[@0,0:0=\'a\',<1>,1:0]..[@2,2:2=\'c\',<3>,1:2]:"x">',
        str(ctx.exception))
def testReplaceThenDeleteMiddleIndex(self):
    """An insert inside a replaced range must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'x')
    rewriter.insertBeforeIndex(1, '0')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        'insert op <InsertBeforeOp@[@1,1:1=\'b\',<2>,1:1]:"0"> within boundaries of previous <ReplaceOp@[@0,0:0=\'a\',<1>,1:0]..[@2,2:2=\'c\',<3>,1:2]:"x">',
        str(ctx.exception)
    )
def testReplaceThenReplaceLowerIndexedSuperset(self):
    """Overlapping lower-indexed superset replace must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    rewriter.replaceRange(1, 3, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    # Python 3 exceptions have no `.message`; compare str(exception) instead.
    # Also: `assertEquals` is a deprecated alias; use `assertEqual`.
    msg = str(ctx.exception)
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:"foo"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:"xyz">""",
        msg)
def testOverlappingReplace2(self):
    """A replace overlapping a previous wider replace must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 3, 'bar')
    rewriter.replaceRange(1, 2, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    # Python 3 exceptions have no `.message`; compare str(exception) instead.
    # Also: `assertEquals` is a deprecated alias; use `assertEqual`.
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:"foo"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:2='<EOF>',<-1>,1:3]:"bar">""",
        str(ctx.exception))
def testReplaceRangeThenInsertAtRightEdge(self):
    """An insert at the right edge inside a replace must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertBeforeIndex(4, 'y')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    # Python 3 exceptions have no `.message`; compare str(exception) instead.
    # Also: `assertEquals` is a deprecated alias; use `assertEqual`.
    msg = str(ctx.exception)
    self.assertEqual(
        "insert op <InsertBeforeOp@[@4,4:4='c',<3>,1:4]:\"y\"> within boundaries of previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"x\">",
        msg)
class MakeNonFinalClassRefactoringListener(JavaParserLabeledListener):
    """
    To implement Make Class Non-Final refactoring based on its actors:
    removes the `final` modifier from the target class declaration.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 class_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param class_name: name of the class to make non-final.
        """
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        if class_name is None:
            raise ValueError("source_class is None")
        else:
            self.objective_class = class_name
        self.is_objective_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterTypeDeclaration(self,
                             ctx: JavaParserLabeled.TypeDeclarationContext):
        if self.objective_class == ctx.classDeclaration().IDENTIFIER().getText():
            # Erase every `final` among the class-level modifiers.
            # (Removed the unused, typo'd local `is_fanal` and the
            # commented-out splitting code from the original.)
            for i in range(len(ctx.classOrInterfaceModifier())):
                if ctx.classOrInterfaceModifier(i).getText() == "final":
                    self.token_stream_rewriter.replaceRange(
                        from_idx=ctx.classOrInterfaceModifier(
                            i).start.tokenIndex,
                        to_idx=ctx.classOrInterfaceModifier(i).stop.tokenIndex,
                        text="")
def testReplaceThenReplaceLowerIndexedSuperset(self):
    """Overlapping lower-indexed superset replace must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    rewriter.replaceRange(1, 3, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:"foo"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:"xyz">""",
        msg
    )
def testOverlappingReplace2(self):
    """A replace overlapping a previous wider replace must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abc')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 3, 'bar')
    rewriter.replaceRange(1, 2, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:"foo"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:2='<EOF>',<-1>,1:3]:"bar">""",
        str(ctx.exception)
    )
def testReplaceRangeThenInsertAtRightEdge(self):
    """An insert at the right edge inside a replace must raise on render."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('abcccba')
    lexer = TestLexer(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertBeforeIndex(4, 'y')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        "insert op <InsertBeforeOp@[@4,4:4='c',<3>,1:4]:\"y\"> within boundaries of previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"x\">",
        msg
    )
class MakeAbstractClassRefactoringListener(JavaParserLabeledListener):
    """
    To implement Make Class Abstract refactoring based on its actors:
    prefixes the target class declaration with the `abstract` modifier.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 class_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param class_name: name of the class to make abstract.
        """
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        if class_name is None:
            raise ValueError("source_class is None")
        else:
            self.objective_class = class_name
        self.is_objective_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        # (Removed the leftover debug `print` statements from the original.)
        if self.objective_class == ctx.IDENTIFIER().getText():
            # TODO(review): token index 0 is hard-coded here; this only works
            # when the `class` keyword is the very first token of the file.
            # ctx.CLASS().symbol.tokenIndex would target the keyword directly.
            self.token_stream_rewriter.replaceRange(
                from_idx=0,
                to_idx=0,
                text="abstract " + ctx.CLASS().getText())
def testToStringStartStop2(self):
    """Sub-range getText stays consistent through a replace and an insert."""
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('x = 3 * 0 + 2 * 0;')
    lexer = TestLexer2(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    self.assertEqual('x = 3 * 0 + 2 * 0;', rewriter.getDefaultText())
    # replace 3 * 0 with 0
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual('x = 0 + 2 * 0;', rewriter.getDefaultText())
    self.assertEqual('x = 0 + 2 * 0;', rewriter.getText('default', 0, 17))
    self.assertEqual('0', rewriter.getText('default', 4, 8))
    self.assertEqual('x = 0', rewriter.getText('default', 0, 8))
    self.assertEqual('2 * 0', rewriter.getText('default', 12, 16))
    rewriter.insertAfter(17, "// comment")
    self.assertEqual('2 * 0;// comment', rewriter.getText('default', 12, 18))
    self.assertEqual('x = 0', rewriter.getText('default', 0, 8))
def testToStringStartStop2(self):
    """Sub-range getText (Interval API) stays consistent through edits."""
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    # Renamed local `input` -> `char_stream`: shadowed the `input` builtin.
    char_stream = InputStream('x = 3 * 0 + 2 * 0;')
    lexer = TestLexer2(char_stream)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    self.assertEqual('x = 3 * 0 + 2 * 0;', rewriter.getDefaultText())
    # replace 3 * 0 with 0
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual('x = 0 + 2 * 0;', rewriter.getDefaultText())
    self.assertEqual('x = 0 + 2 * 0;',
                     rewriter.getText('default', Interval(0, 17)))
    self.assertEqual('0', rewriter.getText('default', Interval(4, 8)))
    self.assertEqual('x = 0', rewriter.getText('default', Interval(0, 8)))
    self.assertEqual('2 * 0', rewriter.getText('default', Interval(12, 16)))
    rewriter.insertAfter(17, "// comment")
    self.assertEqual('2 * 0;// comment',
                     rewriter.getText('default', Interval(12, 18)))
    self.assertEqual('x = 0', rewriter.getText('default', Interval(0, 8)))
class IncreaseMethodVisibilityRefactoringListener(JavaParserLabeledListener):
    """
    Rewrites the target method's modifier so the method becomes `private`.

    NOTE(review): despite the class name saying "Increase", the visible
    behavior narrows visibility (public -> private, or prepends `private`) —
    confirm the intended direction against the caller.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 source_class=None, method_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param source_class: name of the class that owns the target method.
        :param method_name: name of the method whose visibility changes.
        """
        if method_name is None:
            self.method_name = ""
        else:
            self.method_name = method_name
        if source_class is None:
            self.source_class = ""
        else:
            self.source_class = source_class
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        self.is_source_class = False

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        # Track whether the walker is currently inside the source class.
        class_identifier = ctx.IDENTIFIER().getText()
        self.is_source_class = class_identifier == self.source_class

    def exitMethodDeclaration(self,
                              ctx: JavaParserLabeled.MethodDeclarationContext):
        # (Removed the commented-out debug prints from the original.)
        if not self.is_source_class:
            return None
        grand_parent_ctx = ctx.parentCtx.parentCtx
        method_identifier = ctx.IDENTIFIER().getText()
        # NOTE(review): substring match kept for backward compatibility.
        if self.method_name in method_identifier:
            if grand_parent_ctx.modifier() == []:
                # Package-private method: prepend `private` to the return type.
                self.token_stream_rewriter.replaceRange(
                    from_idx=ctx.typeTypeOrVoid().start.tokenIndex,
                    to_idx=ctx.typeTypeOrVoid().stop.tokenIndex,
                    text='private ' + ctx.typeTypeOrVoid().getText())
            elif grand_parent_ctx.modifier(0).getText() == 'public':
                # Swap `public` for `private` in place.
                self.token_stream_rewriter.replaceRange(
                    from_idx=grand_parent_ctx.modifier(0).start.tokenIndex,
                    to_idx=grand_parent_ctx.modifier(0).stop.tokenIndex,
                    text='private')
            elif grand_parent_ctx.modifier(0).getText() != 'private':
                # Other leading modifier (e.g. `static`): prepend `private`.
                self.token_stream_rewriter.replaceRange(
                    from_idx=grand_parent_ctx.modifier(0).start.tokenIndex,
                    to_idx=grand_parent_ctx.modifier(0).stop.tokenIndex,
                    text='private ' + grand_parent_ctx.modifier(0).getText())
class MakeFinalClassRefactoringListener(JavaParserLabeledListener):
    """
    To implement Make Class Final refactoring based on its actors:
    prefixes the target class declaration with the `final` modifier.
    (Docstring fixed: the original was copied from extract-class.)
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 class_name: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param class_name: name of the class to make final.
        """
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        if class_name is None:
            raise ValueError("source_class is None")
        else:
            self.objective_class = class_name
        self.is_objective_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        if self.objective_class == ctx.IDENTIFIER().getText():
            # TODO(review): token index 0 is hard-coded here; this only works
            # when the `class` keyword is the very first token of the file.
            # ctx.CLASS().symbol.tokenIndex would target the keyword directly.
            self.token_stream_rewriter.replaceRange(
                from_idx=0,
                to_idx=0,
                text="final " + ctx.CLASS().getText())
class EncapsulateFiledRefactoringListener(Java9_v2Listener):
    """
    To implement the encapsulate field refactoring.
    Encapsulate field: Make a public field private and provide accessors.

    NOTE(review): "Filed" in the class name is a typo for "Field"; kept
    because renaming would break external callers.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 field_identifier: str = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param field_identifier: name of the field to encapsulate.
        """
        self.token_stream = common_token_stream
        self.field_identifier = field_identifier
        # Move all the tokens in the source code in a buffer,
        # token_stream_rewriter.
        if common_token_stream is not None:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        else:
            raise TypeError('common_token_stream is None')

    def exitFieldDeclaration(self,
                             ctx: Java9_v2Parser.FieldDeclarationContext):
        # Whole-declarator-list text must equal the field name, so this
        # matches only single-declarator fields.
        if ctx.variableDeclaratorList().getText() == self.field_identifier:
            if ctx.fieldModifier(0).getText() == 'public':
                # Demote the field from public to private.
                self.token_stream_rewriter.replaceRange(
                    from_idx=ctx.fieldModifier(0).start.tokenIndex,
                    to_idx=ctx.fieldModifier(0).stop.tokenIndex,
                    text='private')
                # generate accessor and mutator methods
                # Accessor body
                new_code = '\n\t'
                new_code += 'public ' + ctx.unannType().getText(
                ) + ' get' + str.capitalize(self.field_identifier)
                new_code += '() { \n\t\t return this.' + \
                    self.field_identifier + ';' + '\n\t}'
                # Mutator body
                new_code += '\n\t'
                new_code += 'public void set' + str.capitalize(
                    self.field_identifier)
                new_code += '(' + ctx.unannType().getText(
                ) + ' ' + self.field_identifier + ') { \n\t\t'
                new_code += 'this.' + self.field_identifier + ' = ' + \
                    self.field_identifier + ';' + '\n\t}\n'
                self.token_stream_rewriter.insertAfter(ctx.stop.tokenIndex,
                                                       new_code)
                # Replace the trailing hidden (whitespace/comment) tokens with
                # a banner marking the generated methods.
                hidden = self.token_stream.getHiddenTokensToRight(
                    ctx.stop.tokenIndex)
                self.token_stream_rewriter.replaceRange(
                    from_idx=hidden[0].tokenIndex,
                    to_idx=hidden[-1].tokenIndex,
                    text='\t/*End of accessor and mutator methods!*/\n\n')

    def exitAssignment(self, ctx: Java9_v2Parser.AssignmentContext):
        # Rewrite `field = expr` / `this.field = expr` into a setter call.
        if ctx.leftHandSide().getText() == self.field_identifier or \
                ctx.leftHandSide().getText() == 'this.' + self.field_identifier:
            # Read the RHS from the rewriter so earlier edits (e.g. getter
            # substitutions inside the expression) are preserved.
            expr_code = self.token_stream_rewriter.getText(
                program_name=self.token_stream_rewriter.DEFAULT_PROGRAM_NAME,
                start=ctx.expression().start.tokenIndex,
                stop=ctx.expression().stop.tokenIndex)
            new_code = 'this.set' + str.capitalize(
                self.field_identifier) + '(' + expr_code + ')'
            self.token_stream_rewriter.replaceRange(ctx.start.tokenIndex,
                                                    ctx.stop.tokenIndex,
                                                    new_code)

    def exitPrimary(self, ctx: Java9_v2Parser.PrimaryContext):
        # Rewrite bare reads of the field into getter calls.
        # NOTE(review): child count 2 presumably filters to `this.field`-shaped
        # primaries — confirm against the Java9_v2 grammar.
        if ctx.getChildCount() == 2:
            if ctx.getText() == 'this.' + self.field_identifier or \
                    ctx.getText() == self.field_identifier:
                new_code = 'this.get' + str.capitalize(
                    self.field_identifier) + '()'
                self.token_stream_rewriter.replaceRange(
                    ctx.start.tokenIndex, ctx.stop.tokenIndex, new_code)

    def enterCompilationUnit1(self,
                              ctx: Java9_v2Parser.CompilationUnit1Context):
        # Stamp a banner over the leading hidden tokens of the file.
        hidden = self.token_stream.getHiddenTokensToLeft(ctx.start.tokenIndex)
        self.token_stream_rewriter.replaceRange(
            from_idx=hidden[0].tokenIndex,
            to_idx=hidden[-1].tokenIndex,
            text='/*After refactoring (Refactored version)*/\n')
class ReplaceParameterWithQueryRefactoringListener(JavaParserLabeledListener):
    """
    To implement replace parameter with query refactoring based on its actors.
    Find usages of target method and remove target parameters from these and
    add the removed parameters to top of target method.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 target_class: str = None, target_method: str = None,
                 target_parameters: list = None):
        """
        :param common_token_stream: token stream of the parsed source file.
        :param target_class: class owning the target method/constructor.
        :param target_method: method whose parameters are replaced.
        :param target_parameters: 1-based positions of parameters to remove.
        """
        if common_token_stream is None:
            raise ValueError("common_token_stream is None")
        else:
            self.token_stream_rewriter = TokenStreamRewriter(
                common_token_stream)
        if target_class is None:
            raise ValueError("target class is None")
        else:
            self.target_class = target_class
        if target_method is None:
            raise ValueError("target method is None")
        else:
            self.target_method = target_method
        if target_parameters is None:
            self.target_parameters = []
        else:
            self.target_parameters = target_parameters
        # Walker state: current scope names and collected contexts.
        self.current_class = None
        self.current_method = None
        self.current_method_call = None
        self.target_method_ctx = None
        self.removed_expressions = []
        self.all_local_variable_declarators = []
        self.add_to_top_of_target_method = []
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        self.current_class = ctx.IDENTIFIER().getText()

    def exitClassDeclaration(self,
                             ctx: JavaParserLabeled.ClassDeclarationContext):
        self.current_class = None

    def enterMethodDeclaration(
            self, ctx: JavaParserLabeled.MethodDeclarationContext):
        self.current_method = ctx.IDENTIFIER().getText()
        # Remember the target method's context for exitCompilationUnit.
        if self.current_method == self.target_method and \
                self.current_class == self.target_class:
            self.target_method_ctx = ctx

    def exitMethodDeclaration(self,
                              ctx: JavaParserLabeled.MethodDeclarationContext):
        self.exit_method_or_constructor()

    def enterConstructorDeclaration(
            self, ctx: JavaParserLabeled.ConstructorDeclarationContext):
        self.current_method = ctx.IDENTIFIER().getText()
        if self.current_method == self.target_method and \
                self.current_class == self.target_class:
            self.target_method_ctx = ctx

    def exitConstructorDeclaration(
            self, ctx: JavaParserLabeled.ConstructorDeclarationContext):
        self.exit_method_or_constructor()

    def enterLocalVariableDeclaration(
            self, ctx: JavaParserLabeled.LocalVariableDeclarationContext):
        # Collected per method; consumed by remove_expression_declaration.
        self.all_local_variable_declarators.append(ctx)

    def remove_expression_declaration(self, expression):
        """Delete (or shrink) the local declaration backing `expression`,
        stashing its initializer for re-insertion in the target method."""
        for lvd in self.all_local_variable_declarators:
            flag = False
            vds = lvd.variableDeclarators()
            survived_vds = []
            # children alternate declarator, ',', declarator, ... — even
            # indices are the declarators themselves.
            for i in range(len(vds.children)):
                if i % 2 == 0:
                    vd = vds.children[i]
                    if expression.getText() != vd.variableDeclaratorId(
                    ).getText():
                        survived_vds.append(vd.getText())
                    else:
                        self.add_to_top_of_target_method.append(
                            vd.variableInitializer().getText())
                        flag = True
            if len(survived_vds) == 0:
                # Every declarator matched: remove the whole statement.
                parent_ctx = lvd.parentCtx
                print(type(parent_ctx))
                self.token_stream_rewriter.delete(
                    program_name=self.token_stream_rewriter.
                    DEFAULT_PROGRAM_NAME,
                    from_idx=parent_ctx.start.tokenIndex,
                    to_idx=parent_ctx.stop.tokenIndex)
            elif len(survived_vds) < (len(vds.children) + 1) // 2:
                # Some declarators removed: rewrite the survivor list.
                self.token_stream_rewriter.replaceRange(
                    from_idx=vds.start.tokenIndex,
                    to_idx=vds.stop.tokenIndex,
                    text=f"{', '.join(survived_vds)}")
            if flag:
                break

    def exit_method_or_constructor(self):
        """Flush per-method state after leaving a method or constructor."""
        for expression in self.removed_expressions:
            # A bare-identifier argument (Expression0/Primary4) refers to a
            # local variable whose declaration must be pulled along.
            if type(expression) is JavaParserLabeled.Expression0Context and \
                    type(expression.primary()) is \
                    JavaParserLabeled.Primary4Context:
                self.remove_expression_declaration(expression)
            else:
                self.add_to_top_of_target_method.append(expression.getText())
        self.removed_expressions = []
        self.all_local_variable_declarators = []
        self.current_method = None

    def enterMethodCall0(self, ctx: JavaParserLabeled.MethodCall0Context):
        self.current_method_call = ctx.IDENTIFIER().getText()

    def exitMethodCall0(self, ctx: JavaParserLabeled.MethodCall0Context):
        self.current_method_call = None

    def enterExpressionList(self,
                            ctx: JavaParserLabeled.ExpressionListContext):
        # Inside a call to the target method: drop the targeted argument
        # positions and rewrite the remaining argument list.
        if self.current_method_call == self.target_method:
            parameters = []
            # children alternate expression, ',', expression, ...
            for i in range(len(ctx.children)):
                if i % 2 == 0:
                    if ((i // 2) + 1) in self.target_parameters:
                        self.removed_expressions.append(ctx.children[i])
                    else:
                        parameters.append(ctx.children[i].getText())
            self.token_stream_rewriter.replaceRange(
                from_idx=ctx.start.tokenIndex,
                to_idx=ctx.stop.tokenIndex,
                text=f"{', '.join(parameters)}")

    def exitCompilationUnit(self,
                            ctx: JavaParserLabeled.CompilationUnitContext):
        # Rewrite the target method itself: removed parameters become local
        # declarations inserted at the top of its body.
        if self.target_method_ctx is not None:
            ctx = self.target_method_ctx
            text = ''
            formal_parameter_list = ctx.formalParameters().formalParameterList(
            )
            survived_parameters = []
            for i in range(len(formal_parameter_list.children)):
                if i % 2 == 0:
                    if ((i // 2) + 1) in self.target_parameters:
                        parameter = formal_parameter_list.children[i]
                        parameter_type = parameter.typeType().getText()
                        parameter_vdi = parameter.variableDeclaratorId(
                        ).getText()
                        # FIFO pairing: initializers were queued in call-site
                        # order — assumes one call site; TODO confirm.
                        parameter_initializer = \
                            self.add_to_top_of_target_method[0]
                        text += \
                            parameter_type + ' ' + parameter_vdi + ' = ' + \
                            parameter_initializer + \
                            ';' + self.NEW_LINE + self.TAB + self.TAB
                        self.add_to_top_of_target_method.remove(
                            parameter_initializer)
                    else:
                        parameter = formal_parameter_list.children[i]
                        parameter_type = parameter.typeType().getText()
                        parameter_vdi = parameter.variableDeclaratorId(
                        ).getText()
                        survived_parameters.append(parameter_type + ' ' +
                                                   parameter_vdi)
            self.token_stream_rewriter.replaceRange(
                from_idx=formal_parameter_list.start.tokenIndex,
                to_idx=formal_parameter_list.stop.tokenIndex,
                text=f"{', '.join(survived_parameters)}")
            # Insert the generated declarations just before the first
            # statement of the method body.
            block_statement = ctx.methodBody().block().blockStatement()[0]
            self.token_stream_rewriter.insertAfter(
                index=block_statement.start.tokenIndex - 1, text=text)
class PropagationMakeConcreteClassRefactoringListener(JavaParserLabeledListener):
    """
    Propagates a Make-Concrete-Class refactoring into dependent classes:
    strips the ``extends`` clause from each propagated class, injects an
    instance of the source class into its body, and qualifies uses of the
    affected variables/methods with that instance.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 Source_class=None, using_variable_name=None,
                 used_method_name=None, propagated_class_name=None):
        """
        :param common_token_stream: token stream of the parsed compilation unit
        :param Source_class: name of the (now concrete) source class
        :param using_variable_name: variable names that must be qualified
        :param used_method_name: method names that must be qualified
        :param propagated_class_name: classes the refactoring propagates into
        :raises ValueError: if common_token_stream is None
        """
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        # Default the class name to "" (was []): the value is concatenated
        # and passed to str.capitalize() in enterClassBody, which raises a
        # TypeError on a list.
        self.source_class = "" if Source_class is None else Source_class
        self.using_method_name = [] if used_method_name is None \
            else used_method_name
        self.using_variable_name = [] if using_variable_name is None \
            else using_variable_name
        self.propagated_class_name = [] if propagated_class_name is None \
            else propagated_class_name
        self.is_class = False  # True while walking a propagated class
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.object = ""  # identifier of the injected source-class instance

    def enterClassDeclaration(self,
                              ctx: JavaParserLabeled.ClassDeclarationContext):
        print("Propagation started, please wait...")
        class_identifier = ctx.IDENTIFIER().getText()
        # Single membership test (the original duplicated this check).
        self.is_class = class_identifier in self.propagated_class_name
        if self.is_class:
            # Rewrite the header up to the superclass type, i.e. turn
            # `class X extends Y` into plain `class X`.
            self.token_stream_rewriter.replaceRange(
                from_idx=ctx.start.tokenIndex,
                to_idx=ctx.typeType().stop.tokenIndex,
                text=ctx.CLASS().getText() + ' ' + ctx.IDENTIFIER().getText())

    def enterClassBody(self, ctx: JavaParserLabeled.ClassBodyContext):
        if not self.is_class:
            return None
        # e.g. source class `Shape` -> injected field `objShape`.
        self.object = 'obj' + str.capitalize(self.source_class)
        self.token_stream_rewriter.insertAfter(
            index=ctx.start.tokenIndex,
            text=self.NEW_LINE + self.TAB + self.TAB + self.source_class +
            ' ' + self.object + ' = ' + 'new ' + self.source_class +
            '(' + ')' + ';' + self.NEW_LINE,
            program_name=self.token_stream_rewriter.DEFAULT_PROGRAM_NAME)

    def enterVariableDeclarator(
            self, ctx: JavaParserLabeled.VariableDeclaratorContext):
        if not self.is_class:
            return None
        if ctx.variableDeclaratorId().IDENTIFIER().getText() \
                in self.using_variable_name:
            # 3 children presumably means `id = initializer` — qualify the
            # initializer with the injected instance.
            if ctx.getChildCount() == 3:
                self.token_stream_rewriter.insertBefore(
                    index=ctx.variableInitializer().start.tokenIndex,
                    text=self.object + '.',
                    program_name=self.token_stream_rewriter.
                    DEFAULT_PROGRAM_NAME)

    def enterExpression(self, ctx: JavaParserLabeled.ExpressionContext):
        if not self.is_class:
            return None
        # Guard clauses replace the original nested `!= None` checks.
        if ctx is None or ctx.methodCall() is None:
            return None
        method_call = ctx.methodCall()
        if method_call.IDENTIFIER().getText() in self.using_method_name:
            # 3 children presumably means `name ( )` — qualify the call.
            if method_call.getChildCount() == 3:
                self.token_stream_rewriter.insertBefore(
                    index=ctx.start.tokenIndex,
                    text=self.object + '.',
                    program_name=self.token_stream_rewriter.
                    DEFAULT_PROGRAM_NAME)
class VisitorPatternRefactoringListener(JavaParserLabeledListener):
    """
    Implement the Visitor design-pattern refactoring: turns the superclass
    into an interface with an accept() method, generates a Visitor interface
    plus a concrete DoVisitor class from the superclass's method bodies,
    makes the subclasses implement the interface, and rewrites calls in Main.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 SuperClass_identifier: str = None,
                 SubClass_identifier: list = None):
        """
        :param common_token_stream: token stream of the parsed compilation unit
        :param SuperClass_identifier: name of the superclass to convert
        :param SubClass_identifier: list of subclass names, in the order their
            visit() methods should be generated
        :raises TypeError: if common_token_stream is None
        """
        self.enter_class = False
        self.token_stream = common_token_stream
        self.SuperClass_identifier = SuperClass_identifier
        self.SubClass_identifier = SubClass_identifier
        # Flags tracking which kind of class the walker is currently inside.
        self.InSuperClass = False
        self.InSubClass = False
        self.InMainClass = False
        self.CurrentCC = None
        # Maps a superclass method name -> [its body text]; filled while
        # walking the superclass, consumed in exitClassDeclaration and
        # enterMethodCall0.
        self.Visitors = {}
        # Move all the tokens in the source code in a buffer, token_stream_rewriter.
        if common_token_stream is not None:
            self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        else:
            raise TypeError('common_token_stream is None')

    def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        if ctx.IDENTIFIER().getText() == self.SuperClass_identifier:
            self.InSuperClass = True
        elif ctx.IDENTIFIER().getText() in self.SubClass_identifier:
            self.InSubClass = True
        elif ctx.IDENTIFIER().getText() == "Main":
            self.InMainClass = True
        # EXTENDS() is None when there is no extends clause; str(None) is
        # "None", so this only fires for `class X extends Y`.
        if ctx.EXTENDS().__str__() == "extends":
            # SubClass Headers Rename: swap the `extends` keyword for
            # `implements`.
            # NOTE(review): tokenIndex + 4 assumes a fixed token layout
            # (`class`, ws, name, ws, `extends`) — confirm against the lexer's
            # hidden-channel handling.
            self.token_stream_rewriter.insertAfter(ctx.start.tokenIndex + 4, "implements")
            self.token_stream_rewriter.deleteIndex(ctx.start.tokenIndex + 4)

    def exitClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        if self.InSuperClass:
            # SuperClass Interface Make: interface with a single accept()
            # method, inserted just before the old class.
            interface_text = (
                "interface " + self.SuperClass_identifier +
                "\n{\n\tpublic void accept (Visitor" + self.SuperClass_identifier +
                " visitor);\n}"
            )
            self.token_stream_rewriter.insertAfter(ctx.start.tokenIndex - 1,
                                                   "\n" + interface_text + "\n")
            # SuperClass Visitor interface Make: one visit() overload per
            # collected method, paired positionally with SubClass_identifier.
            interface_text_vistor = "interface Visitor" + self.SuperClass_identifier + "\n{"
            index = 0
            for item in self.Visitors:
                interface_text_vistor += "\n\t" + "public void " + "visit(" + self.SubClass_identifier[
                    index] + " " + item + ");"
                index += 1
            interface_text_vistor += "\n}"
            self.token_stream_rewriter.insertAfter(ctx.start.tokenIndex - 1,
                                                   "\n" + interface_text_vistor + "\n")
            # SuperClass DoVisitor Make: concrete visitor carrying the old
            # method bodies; it replaces the superclass entirely.
            newSC = (
                "\nclass DoVisitor" + self.SuperClass_identifier +
                " implements Visitor" + self.SuperClass_identifier + "\n{"
            )
            method_body = ""
            index = 0
            # SuperClassDoVisitor Mathods Make
            for item in self.Visitors:
                # NOTE(review): str_list is an external helper; the [2:-2]
                # slice presumably strips a "['...']"-style wrapper as well as
                # the body's braces — confirm its output format.
                method_body = str_list(self.Visitors[item])
                method_body = "{\n\t" + method_body[2:-2] + "\n\t}"
                newSC += "\n\t" + "@Override\n\tpublic void visit(" + self.SubClass_identifier[
                    index] + " " + item + ")\n\t" + method_body
                index += 1
            newSC += "\n}"
            self.token_stream_rewriter.replaceRange(ctx.start.tokenIndex,
                                                    ctx.stop.tokenIndex, newSC)
        # Reset all position flags when leaving any class declaration.
        self.InSuperClass = False
        self.InSubClass = False
        self.InMainClass = False

    def enterMethodDeclaration(self, ctx: JavaParserLabeled.MethodDeclarationContext):
        # Extract Methods Name & Methods body of SuperClass
        if self.InSuperClass:
            new_class_name = ctx.IDENTIFIER().getText()
            new_method_body = ctx.methodBody().getText()
            self.Visitors[new_class_name] = [new_method_body]

    def exitClassBody(self, ctx: JavaParserLabeled.ClassBodyContext):
        if self.InSubClass:
            # Implement Mathod of SuperClass InterFace: give each subclass the
            # standard double-dispatch accept() body.
            override_text = (
                "@Override\n\tpublic void accept(Visitor" + self.SuperClass_identifier +
                " visitor)\n\t{\n\t\tvisitor.visit(this);\n\t}"
            )
            self.token_stream_rewriter.insertAfter(ctx.start.tokenIndex,
                                                   "\n\t" + override_text)

    def enterMethodCall0(self, ctx: JavaParserLabeled.MethodCall0Context):
        if self.InMainClass:
            # Modify Main Method: replace calls to the moved methods with
            # accept(new DoVisitor<SuperClass>()).
            if ctx.IDENTIFIER().getText() in self.Visitors:
                self.token_stream_rewriter.replaceRange(
                    from_idx=ctx.start.tokenIndex,
                    to_idx=ctx.stop.tokenIndex,
                    text="accept(new DoVisitor" + self.SuperClass_identifier + "())")
class FindUsagesListener(JavaParserLabeledListener):
    """
    First pass of an extract-class propagation: rewrites type references of
    the source class to the new class, and records in an AllUsageList every
    variable typed as the source class — keyed by (identifier, scope) —
    together with the fields and methods accessed through it.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 source_class: str = None, new_class: str = None,
                 moved_fields=None, moved_methods=None,
                 output_path: str = ""):
        """
        :param common_token_stream: token stream of the parsed compilation unit
        :param source_class: class the members were moved out of
        :param new_class: class the members were moved into
        :param moved_fields: names of the moved fields
        :param moved_methods: names of the moved methods
        :param output_path: destination path for the rewritten source
        :raises ValueError: if a required argument is None
        """
        self.moved_methods = [] if moved_methods is None else moved_methods
        self.moved_fields = [] if moved_fields is None else moved_fields
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        if source_class is None:
            raise ValueError("source_class is None")
        self.source_class = source_class
        if new_class is None:
            raise ValueError("new_class is None")
        self.new_class = new_class
        self.output_path = output_path
        self.is_source_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""
        # Stack of "class:X" / "method:Y" entries naming the current scope.
        # NOTE(review): the live list object is stored inside the usage
        # records below; AllUsageList is assumed to copy (or compare
        # immediately) — confirm, otherwise later pops mutate stored keys.
        self.scope = []
        self.aul = AllUsageList()

    def exitTypeTypeOrVoid(self, ctx: JavaParserLabeled.TypeTypeOrVoidContext):
        # Retype return/declared types of the source class.
        if ctx.getText() == self.source_class:
            self.token_stream_rewriter.replaceRange(
                from_idx=ctx.start.tokenIndex,
                to_idx=ctx.stop.tokenIndex,
                text=f"{self.new_class}")

    def exitFormalParameter(self, ctx: JavaParserLabeled.FormalParameterContext):
        # Retype method parameters of the source class.
        if ctx.typeType().getText() == self.source_class:
            self.token_stream_rewriter.replaceRange(
                from_idx=ctx.typeType().start.tokenIndex,
                to_idx=ctx.typeType().stop.tokenIndex,
                text=f"{self.new_class}")

    def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        self.scope.append(f"class:{ctx.IDENTIFIER().getText()}")

    def enterMethodDeclaration(
            self, ctx: JavaParserLabeled.MethodDeclarationContext):
        self.scope.append(f"method:{ctx.IDENTIFIER().getText()}")

    def exitClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        self.scope.pop()

    def exitMethodDeclaration(self, ctx: JavaParserLabeled.MethodDeclarationContext):
        self.scope.pop()

    def exitFieldDeclaration(self, ctx: JavaParserLabeled.FieldDeclarationContext):
        # Record fields typed as the source class.
        # NOTE(review): only the first declarator of a multi-declarator line
        # is recorded — confirm that is intended.
        if ctx.typeType().getText() == self.source_class:
            self.aul.add_identifier(
                (ctx.variableDeclarators().variableDeclarator(
                    0).variableDeclaratorId().IDENTIFIER().getText(),
                 self.scope))

    def exitLocalVariableDeclaration(
            self, ctx: JavaParserLabeled.LocalVariableDeclarationContext):
        # Record local variables typed as the source class.
        if ctx.typeType().getText() == self.source_class:
            self.aul.add_identifier(
                (ctx.variableDeclarators().variableDeclarator(
                    0).variableDeclaratorId().IDENTIFIER().getText(),
                 self.scope))

    def exitExpression1(self, ctx: JavaParserLabeled.Expression1Context):
        # expression '.' (IDENTIFIER | methodCall): record the field or
        # method accessed through the receiver identifier.
        # isinstance replaces the original `type(x) == T` comparisons
        # (PEP 8); the labeled alternative contexts have no subclasses, so
        # matching is unchanged.
        right_hand_side = ctx.children[-1]
        left_hand_side = ctx.children[0]
        if isinstance(left_hand_side, JavaParserLabeled.Expression0Context):
            # Simple receiver: `x.field` / `x.m()`; `this` is skipped.
            receiver = left_hand_side.getText()
            if isinstance(right_hand_side, tree.Tree.TerminalNodeImpl):
                if receiver != 'this':
                    self.aul.add_field_to_identifier(
                        identifier=(receiver, self.scope),
                        field_name=right_hand_side.getText())
            elif isinstance(right_hand_side, JavaParserLabeled.MethodCall0Context):
                if receiver != 'this':
                    self.aul.add_method_to_identifier(
                        identifier=(receiver, self.scope),
                        method_name=right_hand_side.children[0].getText())
        elif isinstance(left_hand_side, JavaParserLabeled.Expression1Context):
            # Chained receiver: `a.b.field` — attribute usage is credited to
            # the last component of the chain.
            receiver = left_hand_side.children[-1].getText()
            if isinstance(right_hand_side, tree.Tree.TerminalNodeImpl):
                self.aul.add_field_to_identifier(
                    identifier=(receiver, self.scope),
                    field_name=right_hand_side.getText())
            elif isinstance(right_hand_side, JavaParserLabeled.MethodCall0Context):
                self.aul.add_method_to_identifier(
                    identifier=(receiver, self.scope),
                    method_name=right_hand_side.children[0].getText())
class PropagationListener(JavaParserLabeledListener):
    """
    Second pass of an extract-class propagation: using the usage records
    collected by FindUsagesListener (``aul``), retypes field and local
    variable declarations of the source class to the new class whenever a
    declared variable actually uses a moved field or method, and rewrites
    their ``new SourceClass(...)`` initializers to construct the new class.
    """

    def __init__(self, common_token_stream: CommonTokenStream = None,
                 source_class: str = None, new_class: str = None,
                 moved_fields=None, moved_methods=None,
                 output_path: str = "", aul=None):
        """
        :param common_token_stream: token stream of the parsed compilation unit
        :param source_class: class the members were moved out of
        :param new_class: class the members were moved into
        :param moved_fields: names of the moved fields
        :param moved_methods: names of the moved methods
        :param output_path: destination path for the rewritten source
        :param aul: AllUsageList produced by FindUsagesListener
        :raises ValueError: if a required argument is None
        """
        self.moved_methods = [] if moved_methods is None else moved_methods
        self.moved_fields = [] if moved_fields is None else moved_fields
        if common_token_stream is None:
            raise ValueError('common_token_stream is None')
        self.token_stream_rewriter = TokenStreamRewriter(common_token_stream)
        if source_class is None:
            raise ValueError("source_class is None")
        self.source_class = source_class
        if new_class is None:
            raise ValueError("new_class is None")
        self.new_class = new_class
        self.output_path = output_path
        self.is_source_class = False
        self.detected_field = None
        self.detected_method = None
        self.TAB = "\t"
        self.NEW_LINE = "\n"
        self.code = ""
        # Stack of "class:X" / "method:Y" entries naming the current scope;
        # must mirror the stack maintained by FindUsagesListener so lookups
        # into `aul` use the same keys.
        self.scope = []
        self.aul = aul

    def intersection(self, lst1, lst2):
        """Return the elements of lst1 that also appear in lst2, in order."""
        return [value for value in lst1 if value in lst2]

    def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        self.scope.append(f"class:{ctx.IDENTIFIER().getText()}")

    def enterMethodDeclaration(
            self, ctx: JavaParserLabeled.MethodDeclarationContext):
        self.scope.append(f"method:{ctx.IDENTIFIER().getText()}")

    def exitClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext):
        self.scope.pop()

    def exitMethodDeclaration(self, ctx: JavaParserLabeled.MethodDeclarationContext):
        self.scope.pop()

    def _declaration_uses_moved_members(self, variable_declarators):
        """
        True if any variable declared in `variable_declarators` was recorded
        (in the current scope) as using a moved field or method.
        """
        # children alternate declarator, ',', declarator, ... — skip commas.
        for child in variable_declarators.children:
            if child.getText() == ',':
                continue
            var_name = child.variableDeclaratorId().IDENTIFIER().getText()
            fields_used = self.aul.get_identifier_fields((var_name, self.scope))
            methods_used = self.aul.get_identifier_methods((var_name, self.scope))
            if self.intersection(fields_used, self.moved_fields) or \
                    self.intersection(methods_used, self.moved_methods):
                return True
        return False

    def _rewrite_declaration(self, ctx):
        """
        Retype the declaration to the new class and rewrite any
        `new SourceClass(...)` initializer to construct the new class.
        Works for both field and local-variable declaration contexts (both
        expose typeType() and variableDeclarators()).
        """
        self.token_stream_rewriter.replaceRange(
            from_idx=ctx.typeType().start.tokenIndex,
            to_idx=ctx.typeType().stop.tokenIndex,
            text=f"{self.new_class}")
        for child in ctx.variableDeclarators().children:
            if child.getText() == ',':
                continue
            # Only rewrite initializers of the exact shape `= new Type(...)`.
            initializer = child.children[-1]
            if not isinstance(initializer,
                              JavaParserLabeled.VariableInitializer1Context):
                continue
            expr = initializer.children[0]
            if not (isinstance(expr, JavaParserLabeled.Expression4Context)
                    and expr.children[0].getText() == 'new'
                    and len(expr.children) > 1
                    and isinstance(expr.children[1],
                                   JavaParserLabeled.Creator1Context)):
                continue
            created_name = child.variableInitializer().expression() \
                .creator().createdName()
            if created_name.getText() == self.source_class:
                self.token_stream_rewriter.replaceRange(
                    from_idx=created_name.start.tokenIndex,
                    to_idx=created_name.stop.tokenIndex,
                    text=f"{self.new_class}")

    def exitFieldDeclaration(self,
                             ctx: JavaParserLabeled.FieldDeclarationContext):
        # Shares its logic with exitLocalVariableDeclaration via the private
        # helpers above (the original duplicated ~40 lines in each).
        if ctx.typeType().getText() == self.source_class and \
                self._declaration_uses_moved_members(ctx.variableDeclarators()):
            self._rewrite_declaration(ctx)

    def exitLocalVariableDeclaration(
            self, ctx: JavaParserLabeled.LocalVariableDeclarationContext):
        if ctx.typeType().getText() == self.source_class and \
                self._declaration_uses_moved_members(ctx.variableDeclarators()):
            self._rewrite_declaration(ctx)