Code example #1
File: rule_029.py Project: pkorpine/vhdl-style-guide
    def _get_tokens_of_interest(self, oFile):
        # lElsifBoundingTokens, lIfBoundingTokens, oStart and oEnd are
        # defined outside this excerpt in rule_029.py.
        lToi = []
        aToi = oFile.get_tokens_bounded_by_token_when_between_tokens(lElsifBoundingTokens[0], lElsifBoundingTokens[1], oStart, oEnd)
        lToi = utils.combine_two_token_class_lists(lToi, aToi)
        aToi = oFile.get_tokens_bounded_by_token_when_between_tokens(lIfBoundingTokens[0], lIfBoundingTokens[1], oStart, oEnd)
        lToi = utils.combine_two_token_class_lists(lToi, aToi)
        return lToi
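Every snippet on this page merges its partial results through `utils.combine_two_token_class_lists`. The helper's body is not shown here; judging only from its call sites (two lists of tokens of interest in, one list out), a minimal sketch might look like the following. The body below is an assumption inferred from usage, not the project's actual code:

    # Hedged sketch: a plausible shape for utils.combine_two_token_class_lists,
    # inferred only from its call sites on this page. The real vhdl-style-guide
    # helper may differ (for example, it may keep the result ordered by
    # position in the file).
    def combine_two_token_class_lists(lToi, aToi):
        # Concatenate the two lists of token-of-interest objects.
        return lToi + aToi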
Code example #2
    def _get_tokens_of_interest(self, oFile):
        lToi = []
        for lTokenPair in self.lTokens:
            # Match the pair separated by whitespace, then the pair with no
            # whitespace between the tokens, and merge both result lists.
            lToi_a = oFile.get_sequence_of_tokens_matching([lTokenPair[0], parser.whitespace, lTokenPair[1]])
            lToi = utils.combine_two_token_class_lists(lToi, lToi_a)
            lToi_a = oFile.get_sequence_of_tokens_matching(lTokenPair)
            lToi = utils.combine_two_token_class_lists(lToi, lToi_a)
        return lToi
Code example #3
    def _get_tokens_of_interest(self, oFile):
        lToi_a = oFile.get_sequence_of_tokens_matching(
            [self.left_token, parser.whitespace, self.right_token])
        lToi_b = oFile.get_sequence_of_tokens_matching(
            [self.left_token, self.right_token])

        return utils.combine_two_token_class_lists(lToi_a, lToi_b)
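Example #3 is the minimal form of a pattern that recurs on this page: search once for `left_token`, whitespace, `right_token` and once for the two tokens adjacent with no whitespace between them, then merge the two result lists so the rule sees every occurrence of the pair either way.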
Code example #4
    def analyze(self, oFile):
        lToi = []
        lPrevious = []
        for lSequence in self.lSequences:
            # Only search once per unique starting token class.
            if lSequence[0] not in lPrevious:
                aToi = oFile.get_tokens_bounded_by(lSequence[0], self.oLeftToken)
                lToi = utils.combine_two_token_class_lists(lToi, aToi)
            lPrevious.append(lSequence[0])

        for oToi in lToi:
            lTokens = oToi.get_tokens()
            for iToken, oToken in enumerate(lTokens):
                bFound = False
                for lSequence in self.lSequences:
                    if isinstance(oToken, lSequence[0]):
                        if utils.are_next_consecutive_token_types(lSequence, iToken, lTokens):
                            bFound = True
                            break
                        if utils.are_next_consecutive_token_types(lSequence[:-1], iToken, lTokens):
                            dAction = {'num_tokens': len(lSequence) - 1}
                            break
                        if utils.are_next_consecutive_token_types(lSequence[:-2], iToken, lTokens):
                            dAction = {'num_tokens': len(lSequence) - 2}
                            break

                if bFound:
                    break
            else:
                # Reached only when the token loop finished without a break,
                # i.e. no full sequence matched; assumes dAction was bound by
                # one of the partial-match branches above.
                oViolation = violation.New(oToi.get_line_number(), oToi, self.solution)
                oViolation.set_action(dAction)
                self.add_violation(oViolation)
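Example #4 leans on Python's `for ... else`: the `else` block runs only when the loop over `lTokens` finishes without a `break`, i.e. when no token started a fully matching sequence. A self-contained demonstration of that control flow (toy code, unrelated to vhdl-style-guide's API):

    # for/else demo: the else clause runs only if the loop was never broken.
    def report(lTokens, sTarget):
        for sToken in lTokens:
            if sToken == sTarget:
                print('full match; no violation')
                break
        else:
            # Mirrors the violation branch above: no break fired.
            print('no full match; record a violation')

    report(['a', 'b'], 'b')  # full match; no violation
    report(['a', 'b'], 'z')  # no full match; record a violation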
Code example #5
    def _get_tokens_of_interest(self, oFile):
        lToi = []
        for lTokenPair in self.lTokenPairs:
            aToi = oFile.get_tokens_bounded_by(
                lTokenPair[0],
                lTokenPair[1],
                bExcludeLastToken=self.bExcludeLastToken)
            lToi = utils.combine_two_token_class_lists(lToi, aToi)
        return lToi
Code example #6
    def analyze(self, oFile):
        lToi = []
        for lTokenPair in self.lTokens:
            lToi_a = oFile.get_sequence_of_tokens_matching_bounded_by_tokens(
                [lTokenPair[0], parser.whitespace, lTokenPair[1]], self.oStart,
                self.oEnd)
            lToi = utils.combine_two_token_class_lists(lToi, lToi_a)
            lToi_a = oFile.get_sequence_of_tokens_matching_bounded_by_tokens(
                lTokenPair, self.oStart, self.oEnd)
            lToi = utils.combine_two_token_class_lists(lToi, lToi_a)

        for oToi in lToi:
            lTokens = oToi.get_tokens()
            # Two tokens means the pair matched with no whitespace between
            # them; otherwise check that the whitespace token has the
            # configured width.
            if len(lTokens) == 2 or len(lTokens[1].get_value()) != self.iSpaces:
                self.add_violation(
                    violation.New(oToi.get_line_number(), oToi, self.solution))
Code example #7
    def _get_tokens_of_interest(self, oFile):
        lToi = []
        lPrevious = []
        for lSequence in self.lSequences:
            if lSequence[0] not in lPrevious:
                aToi = oFile.get_tokens_bounded_by(
                    lSequence[0],
                    self.oLeftToken,
                    bIncludeTillBeginningOfLine=True)
                lToi = utils.combine_two_token_class_lists(lToi, aToi)
            lPrevious.append(lSequence[0])
        return lToi
Code example #8
    def analyze(self, oFile):
        lToi = []
        for lTokenPair in self.lTokenPairs:
            aToi = oFile.get_tokens_bounded_by(lTokenPair[0], lTokenPair[1], bExcludeLastToken=self.bExcludeLastToken)
            lToi = utils.combine_two_token_class_lists(lToi, aToi)

        for oToi in lToi:

            iLine, lTokens = utils.get_toi_parameters(oToi)

            if utils.are_next_consecutive_token_types_ignoring_whitespace([parser.open_parenthesis], 1, lTokens):
                continue

            iStartColumn = calculate_start_column(oFile, oToi)
            # lColumn is used as a stack of alignment targets: push on '(',
            # pop on ')', so lColumn[-1] is the innermost target column.
            lColumn = [iStartColumn]
            bCheckAlignment = False
            iFirstColumn = oFile.get_column_of_token_index(oToi.get_start_index())
            iColumn = iFirstColumn
            iPreviousColumn = 0
            iIndent = 0
            for iToken, oToken in enumerate(lTokens):

                iLine = utils.increment_line_number(iLine, oToken)

                if isinstance(oToken, parser.carriage_return):
                    bCheckAlignment = True
                    iPreviousColumn = lColumn[-1]
                    iColumn = 0
                    if isinstance(lTokens[iToken + 1], parser.whitespace):
                        iIndent = len(lTokens[iToken + 1].get_value())
                    else:
                        iIndent = 0
                    continue

                if isinstance(oToken, parser.blank_line):
                    bCheckAlignment = False
                    continue

                iColumn += len(oToken.get_value())

                if isinstance(oToken, parser.open_parenthesis):
                    lColumn.append(iColumn + iPreviousColumn - iIndent)

                if isinstance(oToken, parser.close_parenthesis):
                    lColumn.pop()

                if bCheckAlignment:
                    if isinstance(oToken, parser.whitespace):
                        if len(oToken.get_value()) != lColumn[-1]:
                            dAction = {}
                            dAction['line'] = iLine
                            dAction['column'] = lColumn[-1]
                            dAction['action'] = 'adjust'
                            dAction['indent'] = iIndent
                            dAction['previous'] = iPreviousColumn
                            oViolation = violation.New(iLine, oToi.extract_tokens(iToken, iToken), self.solution)
                            oViolation.set_action(dAction)
                            self.add_violation(oViolation)
                    else:
                        # The original read `if lColumn != 0`, which is always
                        # true for a list; comparing the current target column
                        # is presumably what was meant.
                        if lColumn[-1] != 0:
                            dAction = {}
                            if isinstance(oToken, parser.open_parenthesis):
                                dAction['column'] = lColumn[-2]
                            else:
                                dAction['column'] = lColumn[-1]
                            dAction['action'] = 'insert'
                            oViolation = violation.New(iLine, oToi.extract_tokens(iToken, iToken), self.solution)
                            oViolation.set_action(dAction)
                            self.add_violation(oViolation)
                    bCheckAlignment = False
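Example #8 tracks alignment with `lColumn` as a stack: each `open_parenthesis` pushes the column that continuation lines should align to, and each `close_parenthesis` pops it, so `lColumn[-1]` always refers to the innermost open parenthesis. The same idea in isolation (toy code, independent of vhdl-style-guide):

    # Stack-based alignment tracking: push a target column on '(' and pop it
    # on ')', so lStack[-1] is the target of the innermost open parenthesis.
    def alignment_targets(sLine):
        lStack = [0]
        lTargets = []
        for iChar, sChar in enumerate(sLine):
            if sChar == '(':
                lStack.append(iChar + 1)  # continuations align after the paren
            elif sChar == ')':
                lStack.pop()
            lTargets.append(lStack[-1])
        return lTargets

    print(alignment_targets('foo(bar, baz(x))'))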
Code example #9
    def _get_tokens_of_interest(self, oFile):
        lToi = []
        for lTokenPair in self.lTokenPairs:
            lToi_a = oFile.get_tokens_bounded_by(lTokenPair[0], lTokenPair[1])
            lToi = utils.combine_two_token_class_lists(lToi, lToi_a)
        return lToi
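Taken together, the snippets suggest a division of labor: rules that only need custom token collection override `_get_tokens_of_interest`, while rules with bespoke checks override `analyze` outright. The base-class wiring is not shown on this page; a toy sketch of that shape (an assumption, not vhdl-style-guide's actual rule base class) could be:

    # Toy sketch only: illustrates the apparent split between collecting
    # tokens of interest and analyzing them; NOT the project's real base class.
    class ToyRule:
        def analyze(self, oFile):
            # Default analyze: collect tokens of interest, check each one.
            for oToi in self._get_tokens_of_interest(oFile):
                self._check(oToi)

        def _get_tokens_of_interest(self, oFile):
            raise NotImplementedError

        def _check(self, oToi):
            raise NotImplementedError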