Example #1
0
 def next_expanded_token(self, with_defined=False):
     """Return the next token after applying macro expansion.

     Repeatedly pulls raw tokens and, depending on the current expansion
     context, performs '##' catenation, the 'defined' operator (only when
     with_defined is true, i.e. inside a conditional-directive expression),
     '#' stringification of macro parameters, and macro/parameter
     substitution.  May recurse after pushing a new expansion context.

     :param with_defined: treat the identifier 'defined' as the
         preprocessor operator and fold it to a "0"/"1" number token.
     :returns: the next expanded token, or whatever next_token() yields
         (presumably None at end of stream -- TODO confirm).
     """
     # Snapshot the context *before* next_token(), so we can detect
     # whether pulling the token changed self.context.
     context = self.context
     token = self.next_token()
     # '##' token paste: the catenation marker follows the token just read.
     if self.context and self.context.macro_concat():
         self.next_token()
         assert self.context == context, "Catenate should stick within context"
         other = self.expect('identifier')
         return catenate_tokens(token, other, context.variables)
     # Tokens coming out of an already-expanded context are passed through
     # verbatim -- no rescanning of this token here.
     if isinstance(context, ExpandedContext):
         return token
     # 'defined NAME' / 'defined(NAME)': fold to "1" if NAME is a known
     # macro in the environment, "0" otherwise.
     if with_defined and token and value_of(token) == 'defined':
         token = self.next_token()
         if token and value_of(token) == '(':
             token = self.expect('identifier')
             rp = self.next_token()
             assert rp and value_of(
                 rp) == ')', "expected right parenthesis in 'defined'"
         return tokenize.token(position_of(token), "number",
                               ["0", "1"][value_of(token) in self.env])
     # '#param' stringification: only legal directly inside the macro body
     # being expanded (an ExpandContext that is still the current context).
     if token and value_of(token) == '#' and isinstance(
             context, ExpandContext) and context == self.context:
         ntoken = self.next_token()
         if value_of(ntoken) in context.variables:
             return tokenize.token(
                 position_of(ntoken), "string",
                 stringify(context.variables[value_of(ntoken)]))
         else:
             assert False, "'#' outside proper context"
     # Identifier: candidate for parameter substitution or macro expansion.
     if token and name_of(token) == 'identifier':
         value = value_of(token)
         # A macro currently being expanded must not expand again
         # (shadow set prevents infinite self-recursion).
         if value in context.shadow:
             return token
         # Macro parameter: splice in its bound token list, pre-expanded,
         # with an empty shadow tuple.
         if value in context.variables:
             self.context = ExpandedContext(self.context,
                                            iter(context.variables[value]),
                                            ())
             self.pump_context()
             return self.next_expanded_token()
         expansion = self.env.get(value)
         if expansion is None:
             return token
         # Function-like macro: only expands when followed by a call
         # (macro_func() presumably peeks for '(' -- TODO confirm).
         elif callable(
                 expansion) and self.context and self.context.macro_func():
             args = self.next_macro_call()
             self.context = expansion(self.context,
                                      context.shadow + (value, ),
                                      position_of(token), args)
             self.pump_context()
             return self.next_expanded_token()
         # Object-like macro: push its body for rescanning, shadowing
         # its own name.
         else:
             self.context = ExpandContext(self.context, iter(expansion),
                                          context.shadow + (value, ))
             self.pump_context()
             return self.next_expanded_token()
     return token
Example #2
0
 def next_expanded_token(self, with_defined=False):
     """Return the next token after applying macro expansion.

     Handles, in order: '##' catenation, pass-through of already-expanded
     tokens, the 'defined' operator (when with_defined is set), '#'
     stringification of macro parameters, and identifier substitution
     (parameter splice, function-like call, or object-like expansion),
     recursing after each context push.

     :param with_defined: fold 'defined NAME' / 'defined(NAME)' into a
         "0"/"1" number token based on membership in self.env.
     """
     # Capture the context before next_token(), which may itself pop or
     # push contexts.
     context = self.context
     token = self.next_token()
     # '##' token paste following the token just read.
     if self.context and self.context.macro_concat():
         self.next_token()
         assert self.context == context, "Catenate should stick within context"
         other = self.expect('identifier')
         return catenate_tokens(token, other, context.variables)
     # Output of an ExpandedContext is final -- no rescanning here.
     if isinstance(context, ExpandedContext):
         return token
     if with_defined and token and value_of(token) == 'defined':
         token = self.next_token()
         # Parenthesized form: defined(NAME).
         if token and value_of(token) == '(':
             token = self.expect('identifier')
             rp = self.next_token()
             assert rp and value_of(rp) == ')', "expected right parenthesis in 'defined'"
         return tokenize.token(position_of(token), "number", ["0", "1"][value_of(token) in self.env])
     # '#param' stringification is only valid directly inside the macro
     # body currently being expanded.
     if token and value_of(token) == '#' and isinstance(context, ExpandContext) and context == self.context:
         ntoken = self.next_token()
         if value_of(ntoken) in context.variables:
             return tokenize.token(position_of(ntoken), "string", stringify(context.variables[value_of(ntoken)]))
         else:
             assert False, "'#' outside proper context"
     if token and name_of(token) == 'identifier':
         value = value_of(token)
         # Shadowed names (macros already being expanded) do not re-expand.
         if value in context.shadow:
             return token
         # Macro parameter: splice its bound tokens in as pre-expanded.
         if value in context.variables:
             self.context = ExpandedContext(self.context, iter(context.variables[value]), ())
             self.pump_context()
             return self.next_expanded_token()
         expansion = self.env.get(value)
         if expansion is None:
             return token
         # Function-like macro invoked with arguments.
         elif callable(expansion) and self.context and self.context.macro_func():
             args = self.next_macro_call()
             self.context = expansion(self.context, context.shadow + (value,), position_of(token), args)
             self.pump_context()
             return self.next_expanded_token()
         # Object-like macro: push its body, shadowing its own name.
         else:
             self.context = ExpandContext(self.context, iter(expansion), context.shadow + (value,))
             self.pump_context()
             return self.next_expanded_token()
     return token
Example #3
0
def catenate_tokens(lhs, rhs, variables):
    """Implement '##' token pasting: fuse two tokens into one identifier.

    Either side that names a macro parameter in *variables* is first
    replaced by its bound token list (which must hold at most one token);
    the textual values are then concatenated into a single identifier
    token at the position of the left-hand operand.
    """
    position = position_of(lhs)

    def substitute(tok):
        # Macro parameters paste their bound token, not their own name.
        key = value_of(tok)
        if key in variables:
            bound = variables[key]
            assert len(bound) <= 1, "rare case for catenation"
            return bound
        return [tok]

    pieces = substitute(lhs) + substitute(rhs)
    fused = ''.join(value_of(piece) for piece in pieces)
    return tokenize.token(position, 'identifier', fused)
Example #4
0
def catenate_tokens(lhs, rhs, variables):
    """'##' token pasting: concatenate two tokens into one identifier.

    A side whose value names a macro parameter in *variables* contributes
    its bound token list (at most one token) instead of itself.  The
    result carries the position of the left operand.
    """
    position = position_of(lhs)
    parts = []
    for operand in (lhs, rhs):
        name = value_of(operand)
        if name in variables:
            replacement = variables[name]
            assert len(replacement) <= 1, "rare case for catenation"
            parts.extend(replacement)
        else:
            parts.append(operand)
    return tokenize.token(position, 'identifier',
                          ''.join(map(value_of, parts)))
Example #5
0
def process_error(state, position):
    """Produce output for an '#error' directive.

    Returns a single 'error' token carrying the directive's text when the
    current conditional branch is active, otherwise an empty sequence.
    """
    # NOTE(review): macro_stream() is invoked even when the branch is
    # inactive -- presumably to consume the directive body; preserved.
    stream = state.macro_stream()
    if not state.processing:
        return ()
    message = ' '.join(value_of(tok) for tok in stream)
    return [tokenize.token(position, 'error', message)]
Example #6
0
def process_error(state, position):
    """Turn an '#error' directive into an error token (or nothing).

    When the enclosing conditional branch is active, the directive's
    tokens are joined into a message and wrapped in one 'error' token;
    in an inactive branch an empty sequence is returned.
    """
    # macro_stream() runs unconditionally -- presumably consuming the
    # directive body regardless of the branch state; kept as-is.
    directive_tokens = state.macro_stream()
    if state.processing:
        text = ' '.join(map(value_of, directive_tokens))
        error_token = tokenize.token(position, 'error', text)
        return [error_token]
    return ()