def _fix_violation(self, oViolation):
    dAction = oViolation.get_action()
    if dAction['convert_to'] == 'edge':
        # Build rising_edge(<clock>) or falling_edge(<clock>)
        lTokens = []
        if dAction['edge'] == 'rising_edge':
            lTokens.append(token.ieee.std_logic_1164.function.rising_edge('rising_edge'))
        else:
            lTokens.append(token.ieee.std_logic_1164.function.falling_edge('falling_edge'))
        lTokens.append(parser.open_parenthesis())
        lTokens.append(parser.todo(dAction['clock']))
        lTokens.append(parser.close_parenthesis())
    else:
        # Build <clock>'event and <clock> = '<edge>'
        lTokens = []
        lTokens.append(parser.todo(dAction['clock']))
        lTokens.append(parser.tic("'"))
        lTokens.append(parser.event_keyword('event'))
        lTokens.append(parser.whitespace(' '))
        lTokens.append(token.logical_operator.and_operator('and'))
        lTokens.append(parser.whitespace(' '))
        lTokens.append(parser.todo(dAction['clock']))
        lTokens.append(parser.whitespace(' '))
        lTokens.append(token.relational_operator.equal('='))
        lTokens.append(parser.whitespace(' '))
        lTokens.append(parser.character_literal(dAction['edge']))
    oViolation.set_tokens(lTokens)

def _fix_new_line_after_assign(oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if dAction['action'] == 'insert':
        if not isinstance(lTokens[0], parser.whitespace):
            lTokens.insert(0, parser.whitespace(' '))
        lTokens.insert(0, parser.carriage_return())
        oViolation.set_tokens(lTokens)
    elif dAction['action'] == 'remove':
        lNewTokens = []
        lNewTokens.append(lTokens[0])
        lNewTokens.append(parser.whitespace(' '))
        lNewTokens.append(lTokens[-1])
        oViolation.set_tokens(lNewTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    if isinstance(lTokens[1], parser.whitespace):
        lTokens[1].set_value(' ')
    else:
        lTokens.insert(1, parser.whitespace(' '))
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oFile):
    for oViolation in self.violations:
        lTokens = oViolation.get_tokens()
        lTokens.append(parser.whitespace(' '))
        lTokens.append(self.insert_token)
        oViolation.set_tokens(lTokens)
    oFile.update(self.violations)

def _fix_violation(self, oFile):
    for oViolation in self.violations:
        lTokens = oViolation.get_tokens()
        lTokens.insert(1, lTokens.pop())
        lTokens.insert(1, parser.whitespace(' '))
        oViolation.set_tokens(lTokens)
    oFile.update(self.violations)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    lKeys = list(dAction.keys())
    for sKey in lKeys:
        if sKey == 'left':
            if dAction[sKey]['action'] == 'adjust':
                lTokens[0].set_value(' ' * self.iSpaces)
            else:
                lTokens.insert(1, parser.whitespace(' '))
        if sKey == 'right':
            if dAction[sKey]['action'] == 'adjust':
                lTokens[-1].set_value(' ' * self.iSpaces)
            else:
                lTokens.insert(len(lTokens) - 1, parser.whitespace(' '))
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    if isinstance(lTokens[-1], parser.whitespace):
        lTokens.pop()
    oToken = lTokens.pop()
    lTokens.insert(0, oToken)
    lTokens.insert(0, parser.whitespace(' '))
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    oMoveToken = lTokens.pop(dAction['moveIndex'])
    lTokens.insert(dAction['insertIndex'], oMoveToken)
    lTokens.insert(dAction['insertIndex'], parser.whitespace(' '))
    lTokens = utils.fix_blank_lines(lTokens)
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    if self.action == 'remove':
        rule_utils.remove_optional_item(lTokens, oViolation, self.insert_token)
    else:
        lTokens.insert(1, self.insert_token)
        lTokens.insert(1, parser.whitespace(' '))
        oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if dAction['type'] == 'adjust':
        lTokens[0].set_value(' ' * dAction['adjust'])
    elif dAction['type'] == 'insert':
        lTokens.insert(0, parser.whitespace(' ' * dAction['adjust']))
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    sAction = oViolation.get_action()
    if sAction == 'insert':
        lTokens.insert(1, parser.whitespace(' '))
    elif sAction == 'adjust':
        lTokens[1].set_value(' ')
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oFile):
    for oViolation in self.violations:
        lTokens = oViolation.get_tokens()
        if isinstance(lTokens[1], parser.whitespace):
            lTokens[1].set_value(' ' * self.iSpaces)
        else:
            lTokens.insert(1, parser.whitespace(' ' * self.iSpaces))
        oViolation.set_tokens(lTokens)
    oFile.update(self.violations)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if dAction['action'] == 'insert':
        lTokens.insert(len(lTokens) - 1, parser.whitespace(' ' * dAction['column']))
    else:
        lTokens[-2].set_value(' ' * dAction['column'])
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oFile):
    for oViolation in self.violations:
        lTokens = oViolation.get_tokens()
        dAction = oViolation.get_action()
        lMoveTokens = lTokens[0:dAction['num_tokens']]
        lTokens = lTokens[dAction['num_tokens']:]
        lTokens = lTokens[:-1] + lMoveTokens + [parser.whitespace(' ')] + [lTokens[-1]]
        oViolation.set_tokens(lTokens)
    oFile.update(self.violations)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    lTokens = utils.remove_carriage_returns_from_token_list(lTokens)
    lTokens = utils.remove_consecutive_whitespace_tokens(lTokens)
    if self.bInsertSpace:
        if not isinstance(lTokens[1], parser.whitespace):
            lTokens.insert(1, parser.whitespace(' '))
    oViolation.set_tokens(lTokens)

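# The fixes in this section lean on list-level cleanup helpers from `utils`
# (remove_carriage_returns_from_token_list, remove_consecutive_whitespace_tokens,
# fix_blank_lines), whose implementations are not shown here.  The two sketches
# below are assumptions about the behaviour they appear to provide, reusing the
# `parser` module already in scope; they are not the actual vsg code.
def remove_carriage_returns_from_token_list(lTokens):
    # Drop every carriage_return token from the list.
    return [oToken for oToken in lTokens
            if not isinstance(oToken, parser.carriage_return)]


def remove_consecutive_whitespace_tokens(lTokens):
    # Collapse runs of adjacent whitespace tokens down to the first one.
    lNewTokens = []
    for oToken in lTokens:
        if lNewTokens and isinstance(oToken, parser.whitespace) \
                and isinstance(lNewTokens[-1], parser.whitespace):
            continue
        lNewTokens.append(oToken)
    return lNewTokens
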
def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if dAction['action'] == 'adjust':
        lTokens[0].set_value(' ' * dAction['column'])
    else:
        lTokens.insert(0, parser.whitespace(' ' * dAction['column']))
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    iIndex = oViolation.get_token_value()
    lTokens.insert(1, lTokens.pop(iIndex))
    lTokens.insert(1, parser.whitespace(' '))
    lNewTokens = utils.remove_consecutive_whitespace_tokens(lTokens)
    lNewTokens = utils.fix_blank_lines(lNewTokens)
    oViolation.set_tokens(lNewTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    if oViolation.get_action() == 'remove_whitespace':
        oViolation.set_tokens([lTokens[1]])
    elif oViolation.get_action() == 'adjust_whitespace':
        lTokens[0].set_value(lTokens[1].get_indent() * self.indentSize * ' ')
        oViolation.set_tokens(lTokens)
    elif oViolation.get_action() == 'add_whitespace':
        oToken = parser.whitespace(lTokens[0].get_indent() * self.indentSize * ' ')
        lTokens.insert(0, oToken)
        oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if len(lTokens) == 2:
        lTokens.insert(1, parser.whitespace(' ' * dAction['adjust']))
    else:
        iLen = len(lTokens[1].get_value()) + dAction['adjust']
        lTokens[1].set_value(' ' * iLen)
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oFile):
    for oViolation in self.violations:
        lTokens = oViolation.get_tokens()
        for iIndex in range(0, len(lTokens)):
            if isinstance(lTokens[iIndex], self.anchor_token):
                lTokens.insert(iIndex, self.insert_token(oViolation.get_token_value()))
                lTokens.insert(iIndex, parser.whitespace(' '))
        oViolation.set_tokens(lTokens)
    oFile.update(self.violations)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    oToken = lTokens.pop(dAction['move_index'])
    lTokens.insert(dAction['insert'], oToken)
    if self.bInsertWhitespace:
        lTokens.insert(dAction['insert'], parser.whitespace(' '))
    lTokens = utils.remove_consecutive_whitespace_tokens(lTokens)
    lTokens = utils.fix_blank_lines(lTokens)
    oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    iTokenIndex = dAction['token_index']
    if isinstance(lTokens[iTokenIndex - 1], parser.whitespace):
        iLen = len(lTokens[iTokenIndex - 1].get_value())
        lTokens[iTokenIndex - 1].set_value(' ' * (iLen + dAction['adjust']))
    else:
        lTokens.insert(iTokenIndex, parser.whitespace(' ' * dAction['adjust']))
    oViolation.set_tokens(lTokens)

def classify(lTokens, lObjects):
    '''
    Classifies whitespace objects.
    '''
    for iToken, sToken in enumerate(lTokens):
        if string_contains_space(sToken):
            if is_string_literal(sToken):
                pass
            elif is_character_literal(sToken):
                pass
            else:
                lObjects[iToken] = parser.whitespace(sToken)

def classify(lTokens, lObjects):
    '''
    Classifies whitespace objects.
    '''
    # Tokens containing a space are whitespace unless they are string literals.
    for iToken, sToken in enumerate(lTokens):
        if ' ' in sToken:
            if sToken[0] == '"' and sToken[-1] == '"':
                pass
            else:
                lObjects[iToken] = parser.whitespace(sToken)

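# The first classify variant above delegates to helper predicates that are not
# shown in this section.  Judging from the inline checks in the second variant,
# they might look roughly like the sketches below; the names match the calls
# above, but the bodies are assumptions rather than the actual implementations.
def string_contains_space(sToken):
    # True when the raw token text contains at least one space.
    return ' ' in sToken


def is_string_literal(sToken):
    # VHDL string literals are delimited by double quotes, e.g. "a b".
    return len(sToken) >= 2 and sToken[0] == '"' and sToken[-1] == '"'


def is_character_literal(sToken):
    # VHDL character literals are a single character in single quotes, e.g. ' '.
    return len(sToken) == 3 and sToken[0] == "'" and sToken[-1] == "'"
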
def _fix_open_paren_new_line(oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if dAction['action'] == 'insert':
        lTokens.append(parser.carriage_return())
        lTokens.append(parser.whitespace(' '))
        oViolation.set_tokens(lTokens)
    elif dAction['action'] == 'remove':
        lNewTokens = []
        lNewTokens.append(lTokens[0])
        lNewTokens.append(lTokens[-1])
        oViolation.set_tokens(lNewTokens)

def _fix_violation(self, oFile):
    for oViolation in self.violations:
        lTokens = oViolation.get_tokens()
        dAction = oViolation.get_action()
        if dAction['action'] == 'adjust':
            lTokens[0].set_value(' ' * dAction['column'])
        else:
            lTokens.insert(0, parser.whitespace(' ' * dAction['column']))
        oViolation.set_tokens(lTokens)
    oFile.update(self.violations)

def analyze_open_paren_cases(lTokens, dAction):
    if open_paren_space(lTokens):
        dAction['left_remove'] = [0]
        dAction['left_insert'] = []
    elif space_open_paren_space(lTokens):
        dAction['left_remove'] = [0, 1]
        dAction['left_insert'] = []
    elif space_open_paren(lTokens):
        dAction['left_remove'] = [1]
        dAction['left_insert'] = []
    else:
        dAction['left_remove'] = [0]
        dAction['left_insert'] = [parser.whitespace(' ')]

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    if self.action == 'remove':
        rule_utils.remove_optional_item(lTokens, oViolation, self.insert_token)
    else:
        if oViolation.get_token_value() is not None:
            for iIndex in range(0, len(lTokens)):
                if isinstance(lTokens[iIndex], self.anchor_token):
                    lTokens.insert(iIndex, self.insert_token(oViolation.get_token_value()))
                    lTokens.insert(iIndex, parser.whitespace(' '))
            oViolation.set_tokens(lTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    if self.action == 'add':
        lNewTokens = []
        lNewTokens.append(lTokens[0])
        lNewTokens.append(parser.whitespace(' '))
        lNewTokens.extend(self.insert_tokens)
        lNewTokens.extend(lTokens[1:])
    else:
        dAction = oViolation.get_action()
        lNewTokens = lTokens[:dAction['iStartIndex']]
        lNewTokens.extend(lTokens[dAction['iEndIndex']:])
        lNewTokens = utils.remove_consecutive_whitespace_tokens(lNewTokens)
    oViolation.set_tokens(lNewTokens)

def _fix_violation(self, oViolation):
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    bInsertBlankLine = False
    if isinstance(lTokens[0], parser.whitespace):
        lTokens = lTokens[1:]
        bInsertBlankLine = True
    lMoveTokens = lTokens[0:dAction['num_tokens']]
    lTokens = lTokens[dAction['num_tokens']:]
    lTokens = lTokens[:-1] + lMoveTokens + [parser.whitespace(' ')] + [lTokens[-1]]
    lTokens = utils.remove_consecutive_whitespace_tokens(lTokens)
    if bInsertBlankLine:
        lTokens.insert(0, parser.blank_line())
    oViolation.set_tokens(lTokens)