Example #1
0
File: preprocess.py — Project: vmfox/vunit
 def expand(self, values, previous):
     """
     Expand this macro with the given actual argument values.

     ``values`` holds one token list per formal argument in ``self.args``;
     ``previous`` is chained onto every resulting token's location via
     ``add_previous``. Returns the list of expanded tokens.
     """
     expanded = []
     for tok in self.tokens:
         # Substitute identifiers that name a formal argument; copy the rest.
         if tok.kind == IDENTIFIER and tok.value in self.args:
             expanded.extend(values[self.args.index(tok.value)])
         else:
             expanded.append(tok)
     return [Token(tok.kind, tok.value, add_previous(tok.location, previous))
             for tok in expanded]
def strip_loc(tokens):
    """
    Return copies of *tokens* with their location information dropped.
    """
    stripped = []
    for tok in tokens:
        stripped.append(Token(tok.kind, tok.value, None))
    return stripped
Example #3
0
        def replace_keywords(token):
            """Turn a token whose value is a keyword into its keyword kind."""
            if token.value not in KEYWORDS:
                return token
            # Keyword tokens carry their meaning in the kind; the value is cleared.
            return Token(KEYWORDS[token.value], '', token.location)
Example #4
0
 def remove_value(token):
     """Return a copy of *token* with its value blanked out."""
     blanked = Token(token.kind, '', token.location)
     return blanked
Example #5
0
 def slice_value(token, start=None, end=None):
     """Return a copy of *token* whose value is sliced to ``[start:end]``."""
     sliced = token.value[start:end]
     return Token(token.kind, sliced, token.location)
Example #6
0
File: tokenizer.py — Project: wzab/vunit
 def str_value(token):
     """
     Return a copy of *token* whose value is the unquoted string contents:
     the surrounding quotes are stripped, line continuations removed, and
     escaped quotes unescaped.
     """
     text = token.value[1:-1]        # drop the surrounding quote characters
     text = text.replace("\\\n", "")  # remove backslash line continuations
     text = text.replace("\\\"", "\"")  # unescape embedded quotes
     return Token(token.kind, text, token.location)
Example #7
0
 def replace_keywords(token):
     """Turn a token whose value is a keyword into its keyword kind."""
     if token.value not in KEYWORDS:
         return token
     # Keyword tokens carry their meaning in the kind; the value is cleared.
     return Token(KEYWORDS[token.value], '', token.location)