def _fix_violation(self, oViolation):
    """Insert a carriage return near the end of the violation's tokens.

    The insertion point shifts one slot earlier when the second token is
    whitespace, so the line break lands in the same logical position in
    either case.
    """
    lTokens = oViolation.get_tokens()
    iInsert = -2 if isinstance(lTokens[1], parser.whitespace) else -1
    lTokens.insert(iInsert, parser.carriage_return())
    oViolation.set_tokens(lTokens)
def _processFile(self):
    """Tokenize the file contents, classify the tokens, and build the token map."""
    self.lAllObjects = []
    for sCurrentLine in self.filecontent:
        # Tabs become spaces and trailing whitespace is dropped before tokenizing.
        lWords = tokens.create(sCurrentLine.replace('\t', ' ').rstrip())
        lItems = [parser.item(sWord) for sWord in lWords]
        # Classification order matters: each pass may refine the previous one.
        blank.classify(lItems)
        whitespace.classify(lWords, lItems)
        comment.classify(lWords, lItems)
        preprocessor.classify(lWords, lItems)
        pragma.classify(lWords, lItems, self.lOpenPragmas, self.lClosePragmas, self.dVars)
        self.lAllObjects.extend(lItems)
        self.lAllObjects.append(parser.carriage_return())
    try:
        self.lAllObjects[0].set_filename(self.filename)
    except IndexError:
        # Empty file: no first token to tag with the filename.
        pass
    design_file.tokenize(self.lAllObjects)
    post_token_assignments(self.lAllObjects)
    set_token_hierarchy_value(self.lAllObjects)
    self.oTokenMap = process_tokens(self.lAllObjects)
def _fix_violation(self, oFile):
    """Append a carriage return and a blank line to each violation's tokens,
    then push all updated violations back into the file."""
    for oViolation in self.violations:
        lUpdated = oViolation.get_tokens()
        lUpdated.extend([parser.carriage_return(), parser.blank_line()])
        oViolation.set_tokens(lUpdated)
    oFile.update(self.violations)
def _fix_new_line_after_comma(oViolation):
    """Insert or remove a line break after a comma, per the violation's action."""
    lTokens = oViolation.get_tokens()
    sAction = oViolation.get_action()['action']
    if sAction == 'insert':
        if isinstance(lTokens[1], parser.whitespace):
            # Existing whitespace follows the comma; only a break is needed.
            lTokens.insert(1, parser.carriage_return())
        else:
            # No whitespace yet: add a space, then the break before it.
            lTokens.insert(1, parser.whitespace(' '))
            lTokens.insert(1, parser.carriage_return())
        oViolation.set_tokens(lTokens)
    elif sAction == 'remove':
        # Collapse everything between the first and last token to one space.
        oViolation.set_tokens([lTokens[0], parser.whitespace(' '), lTokens[-1]])
def _fix_violation(self, oFile):
    """Insert a blank line followed by a carriage return at the action's
    index in each violation, then update the file."""
    for oViolation in self.violations:
        lUpdated = oViolation.get_tokens()
        iAt = oViolation.get_action()['insert']
        # Second insert at the same index lands before the first, so the
        # final order is: blank line, then carriage return.
        lUpdated.insert(iAt, parser.carriage_return())
        lUpdated.insert(iAt, parser.blank_line())
        oViolation.set_tokens(lUpdated)
    oFile.update(self.violations)
def _fix_violation(self, oViolation):
    """Either append a carriage return + blank line, or drop every token,
    depending on the violation's action."""
    lTokens = oViolation.get_tokens()
    sAction = oViolation.get_action()['action']
    if sAction == 'Insert':
        lTokens.extend([parser.carriage_return(), parser.blank_line()])
        oViolation.set_tokens(lTokens)
    elif sAction == 'Remove':
        oViolation.set_tokens([])
def _fix_violation(self, oViolation):
    """Rotate the token list so it starts at the action's iToken, with a
    carriage return separating the two halves."""
    lTokens = oViolation.get_tokens()
    iSplit = oViolation.get_action()['iToken']
    lRotated = lTokens[iSplit:] + [parser.carriage_return()] + lTokens[:iSplit]
    # NOTE(review): indent adjustment was disabled in the original —
    # lRotated[dAction['index']].set_indent(dAction['indent']) — left off here too.
    oViolation.set_tokens(lRotated)
def _fix_violation(self, oViolation):
    """Remove or require a blank line, depending on the configured style."""
    lTokens = oViolation.get_tokens()
    if self.style == 'no_blank_line':
        # Walk the tokens back-to-front; each blank line also discards the
        # token collected just before it (the break that followed it).
        lKept = []
        for oToken in reversed(lTokens):
            if isinstance(oToken, parser.blank_line):
                lKept.pop()
            else:
                lKept.append(oToken)
        lKept.reverse()
        oViolation.set_tokens(lKept)
    elif self.style == 'require_blank_line':
        # Shift the insertion point one slot earlier when trailing
        # whitespace precedes the final token.
        iAt = -3 if isinstance(lTokens[-2], parser.whitespace) else -2
        lTokens.insert(iAt, parser.blank_line())
        lTokens.insert(iAt, parser.carriage_return())
        oViolation.set_tokens(lTokens)
def _fix_open_paren_new_line(oViolation):
    """Insert a line break after an open paren, or remove everything
    between the paren and the following token, per the action."""
    lTokens = oViolation.get_tokens()
    sAction = oViolation.get_action()['action']
    if sAction == 'insert':
        lTokens.extend([parser.carriage_return(), parser.whitespace(' ')])
        oViolation.set_tokens(lTokens)
    elif sAction == 'remove':
        # Keep only the bracketing tokens.
        oViolation.set_tokens([lTokens[0], lTokens[-1]])
def _fix_violation(self, oViolation):
    """Insert a blank line + carriage return, or remove a token span.

    When dAction['action'] is 'Insert', a blank line followed by a
    carriage return is inserted at dAction['index'] (both inserts use
    the same index, so the blank line ends up first).  Any other action
    removes the tokens in [dAction['start'], dAction['end']).
    """
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    if dAction['action'] == 'Insert':
        lTokens.insert(dAction['index'], parser.carriage_return())
        lTokens.insert(dAction['index'], parser.blank_line())
        oViolation.set_tokens(lTokens)
    else:
        # The original built this list into lNewTokens and then ignored it,
        # recomputing the identical concatenation inline; compute it once.
        oViolation.set_tokens(lTokens[:dAction['start']] + lTokens[dAction['end']:])
def _fix_violation(self, oViolation):
    '''
    Applies fixes for any rule violations.
    '''
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    lIndexes = dAction['identifier_indexes']
    # The tail after the split point is repeated once per identifier.
    lTail = lTokens[dAction['split_index']:]
    lResult = []
    for iIndex in lIndexes:
        lResult.append(lTokens[iIndex])
        lResult.extend(lTail)
        # Every entry except the last gets a semicolon + line break.
        if iIndex != lIndexes[-1]:
            lResult.append(interface_list.semicolon())
            lResult.append(parser.carriage_return())
    oViolation.set_tokens(lResult)
def _fix_violation(self, oViolation):
    """Expand the violation into one deep-copied token line per identifier,
    substituting each identifier into the [start, end] span."""
    lTokens = oViolation.get_tokens()
    dAction = oViolation.get_action()
    iStart = dAction['start']
    iEnd = dAction['end']
    lFinalTokens = []
    for oIdentifier in dAction['identifiers']:
        lLine = []
        for iPos, oToken in enumerate(lTokens):
            # Deep copies keep each generated line independent of the others.
            if iPos < iStart:
                lLine.append(copy.deepcopy(oToken))
            if iPos == iStart:
                lLine.append(oIdentifier)
            if iPos > iEnd:
                lLine.append(copy.deepcopy(oToken))
        lLine = utils.remove_carriage_returns_from_token_list(lLine)
        lFinalTokens.extend(lLine)
        lFinalTokens.append(parser.carriage_return())
    # Drop the trailing carriage return added after the final identifier.
    lFinalTokens.pop()
    oViolation.set_tokens(lFinalTokens)
def append_carriage_return(lTokens):
    """Append a carriage-return token to *lTokens* via append_token."""
    oReturn = parser.carriage_return()
    append_token(lTokens, oReturn)
def _fix_violation(self, oViolation):
    """Terminate the violation's token list with a carriage return."""
    lFixed = oViolation.get_tokens()
    lFixed.append(parser.carriage_return())
    oViolation.set_tokens(lFixed)
def _fix_violation(self, oViolation):
    """Insert a carriage return at the index named by the action."""
    lFixed = oViolation.get_tokens()
    iAt = oViolation.get_action()['insert_index']
    lFixed.insert(iAt, parser.carriage_return())
    oViolation.set_tokens(lFixed)
def _fix_violation(self, oFile):
    """Insert a carriage return after the first token of every violation,
    then push the updates back into the file."""
    for oViolation in self.violations:
        lFixed = oViolation.get_tokens()
        lFixed.insert(1, parser.carriage_return())
        oViolation.set_tokens(lFixed)
    oFile.update(self.violations)
def insert_carriage_return(lTokens, index):
    """Insert a carriage-return token into *lTokens* at *index* via insert_token."""
    oReturn = parser.carriage_return()
    insert_token(lTokens, index, oReturn)