def analyze(self, oFile):
        """Flag a closing token that does not line up one column to the
        right of its opening token.

        Records an 'insert' action when the closing token sits in column 1
        (no leading whitespace) and an 'adjust' action otherwise.
        """
        for oToi in oFile.get_tokens_bounded_by(self.left_token, self.right_token):
            iLine, lTokens = utils.get_toi_parameters(oToi)

            # Only multi-line extractions are of interest.
            if not utils.does_token_type_exist_in_list_of_tokens(parser.carriage_return, lTokens):
                continue

            # The closing token must be the first token on its line.
            if not utils.does_token_start_line(len(lTokens) - 1, lTokens):
                continue

            iColumnOfStart = oFile.get_column_of_token_index(oToi.get_start_index())
            iColumnOfEnd = oFile.get_column_of_token_index(oToi.get_end_index())

            # Already aligned one column to the right of the opening token.
            if iColumnOfStart + 1 == iColumnOfEnd:
                continue

            iLineNumber = iLine + utils.count_token_types_in_list_of_tokens(parser.carriage_return, lTokens)
            sSolution = 'Move ' + lTokens[-1].get_value() + ' to column ' + str(iColumnOfStart)
            dAction = {}
            # Column 1 means there is no leading whitespace token to adjust.
            dAction['action'] = 'insert' if iColumnOfEnd == 1 else 'adjust'
            dAction['column'] = iColumnOfStart
            oViolation = violation.New(iLineNumber, oToi, sSolution)
            oViolation.set_action(dAction)
            self.add_violation(oViolation)
Example #2
0
    def analyze(self, oFile):
        """Flag a close parenthesis that shares a line with preceding code.

        For each close parenthesis, looks at the two tokens before it; if
        neither is a carriage return the parenthesis is not alone on its
        line.  The recorded action is 'move' when a trailing comment must
        be relocated (to just before the comment, skipping one whitespace
        token), otherwise 'insert'.
        """
        aToi = oFile.get_tokens_bounded_by(
            token.generic_clause.close_parenthesis, parser.carriage_return)
        lToi = oFile.get_token_and_n_tokens_before_it(
            [token.generic_clause.close_parenthesis], 2)

        for iToi, oToi in enumerate(lToi):
            lTokens = oToi.get_tokens()

            # Skip when the close parenthesis already starts its own line.
            if any(isinstance(oToken, parser.carriage_return) for oToken in lTokens[:2]):
                continue

            sSolution = self.solution
            dAction = {}
            lLineTokens = aToi[iToi].get_tokens()

            if utils.does_token_type_exist_in_list_of_tokens(
                    parser.comment, lLineTokens):
                # A trailing comment exists; move it out of the way.
                for iToken, oToken in enumerate(lLineTokens):
                    if not isinstance(oToken, parser.comment):
                        continue
                    dAction['action'] = 'move'
                    # Land before the comment, skipping one whitespace token.
                    if isinstance(lLineTokens[iToken - 1], parser.whitespace):
                        dAction['index'] = iToken - 2
                    else:
                        dAction['index'] = iToken - 1
                    break
            else:
                dAction['action'] = 'insert'

            oViolation = violation.New(aToi[iToi].get_line_number(),
                                       aToi[iToi], sSolution)
            oViolation.set_action(dAction)
            self.add_violation(oViolation)
    def _analyze(self, lToi):
        """Record one violation per extraction.

        In 'remove' mode every extraction is flagged as-is.  Otherwise an
        extraction is flagged only when the token to insert is absent, and
        the violation is anchored on the *last* anchor token found.
        """
        sSolution = self.action.capitalize() + ' ' + self.solution

        if self.action == 'remove':
            for oToi in lToi:
                self.add_violation(
                    violation.New(oToi.get_line_number(), oToi, sSolution))
            return

        tAnchorTypes = tuple(self.lAnchorTokens)
        for oToi in lToi:
            iLine, lTokens = utils.get_toi_parameters(oToi)

            # Nothing to do when the token is already present.
            if utils.does_token_type_exist_in_list_of_tokens(
                    type(self.oInsertToken), lTokens):
                continue

            # Track the last anchor token and its line number.
            for iToken, oToken in enumerate(lTokens):
                iLine = utils.increment_line_number(iLine, oToken)
                if isinstance(oToken, tAnchorTypes):
                    iIndex = iToken
                    iLineNumber = iLine

            oViolation = violation.New(iLineNumber,
                                       oToi.extract_tokens(iIndex, iIndex),
                                       sSolution)
            self.add_violation(oViolation)
Example #4
0
 def _analyze(self, lToi):
     """Flag a move token that does not sit directly to the right of its
     anchor token, and record the indices needed to relocate it.

     :param lToi: list of token-extraction objects to inspect.

     NOTE(review): relies on iteration order — sAnchorToken/sSolution are
     only bound once the matching anchor/move tokens have been seen;
     presumably the grammar guarantees both appear when the move token
     exists.  TODO confirm against the token extraction used by callers.
     """
     for oToi in lToi:
         iLine, lTokens = utils.get_toi_parameters(oToi)
         # Skip extractions that do not contain the token to be moved.
         if not utils.does_token_type_exist_in_list_of_tokens(self.oMoveToken, lTokens):
             continue
         dAction = {}
         bPassing = False
         for iToken, oToken in enumerate(lTokens):
             iLine = utils.increment_line_number(iLine, oToken)
             for oAnchorToken in self.lAnchorTokens:
                 if isinstance(oToken, oAnchorToken):
                     # Proposed insertion point: directly after the anchor.
                     dAction['insert'] = iToken + 1
                     sAnchorToken = oToken.get_value()
                     iAnchorLine = iLine
                     # Already compliant when the move token (optionally
                     # preceded by whitespace) immediately follows the anchor.
                     if utils.are_next_consecutive_token_types([parser.whitespace, self.oMoveToken], iToken + 1, lTokens):
                         bPassing = True
                         break
                     elif isinstance(lTokens[iToken + 1], self.oMoveToken):
                         bPassing = True
                         break
             if isinstance(oToken, self.oMoveToken):
                 # Found the misplaced token; remember where it is now.
                 iAnchorLine = iLine
                 dAction['move_index'] = iToken
                 sSolution = 'Move "' + oToken.get_value() + '" on line ' + str(iLine) + ' to the right of "' + sAnchorToken + '" on line ' + str(iAnchorLine)
             if bPassing:
                 break
         else:
             # for/else: only reached when the token loop was NOT broken
             # out of, i.e. the extraction is non-compliant.
             oViolation = violation.New(iAnchorLine, oToi, sSolution)
             oViolation.set_action(dAction)
             oViolation.set_remap()
             self.add_violation(oViolation)
def check_if_override_exists(oFile, iLine, lOverrides):
    """Return True when any override token type appears on the line after
    *iLine*, False otherwise (including when that line yields no token
    object).

    :param oFile: file object providing get_tokens_from_line().
    :param iLine: 1-based line number; the line *after* it is inspected.
    :param lOverrides: iterable of token types to look for.
    """
    oMyToi = oFile.get_tokens_from_line(iLine + 1)
    # Narrow the try to the one call that legitimately raises: the original
    # try-block also swallowed AttributeErrors raised inside the utils
    # lookup, which would hide real bugs.
    try:
        lTokens = oMyToi.get_tokens()
    except AttributeError:
        # No token object for that line (e.g. past end of file).
        return False
    return any(utils.does_token_type_exist_in_list_of_tokens(oOverride, lTokens)
               for oOverride in lOverrides)
Example #6
0
    def analyze(self, oFile):
        """Flag extractions that are missing the *is* keyword.

        For each extraction bounded by the first anchor token and the end
        token, a violation anchored on the *last* anchor token found is
        recorded unless the insert token is already present.

        Fixes: removed the dead local ``dAction`` (it was built but never
        attached to the violation) and trailing whitespace.
        """
        lToi = oFile.get_tokens_bounded_by(self.lAnchorTokens[0], self.oEndToken)
        tAnchorTypes = tuple(self.lAnchorTokens)
        for oToi in lToi:

            iLine, lTokens = utils.get_toi_parameters(oToi)

            # The *is* keyword already exists; nothing to do.
            if utils.does_token_type_exist_in_list_of_tokens(type(self.oInsertToken), lTokens):
                continue

            # Track the last anchor token seen and its line number.
            for iToken, oToken in enumerate(lTokens):
                iLine = utils.increment_line_number(iLine, oToken)
                if isinstance(oToken, tAnchorTypes):
                    iIndex = iToken
                    iLineNumber = iLine
                    sToken = oToken.get_value()

            sSolution = 'Add *is* keyword to the right of ' + sToken
            oViolation = violation.New(iLineNumber, oToi.extract_tokens(iIndex, iIndex), sSolution)
            self.add_violation(oViolation)