Code Example #1
File: PreProcessor.py Project: fizixer/cAST
  def make_cToken(token):
    # Convert a preprocessor token into a C token by re-lexing its source string
    # with the C lexer; `self` is not a parameter here, so it is taken from the
    # enclosing scope.
    if token.type == 'c':
      return copy(token)
    (string, terminalId, function) = self.cLS.match(token.source_string, True)
    #print(token.source_string, terminalId, token.resource, token.lineno, token.colno)
    #newId = self.cPPTtocT[token.id]
    return cToken( terminalId, token.resource, c_Parser.terminals[terminalId], string, token.lineno, token.colno, None )
Code Example #2
File: PreProcessor.py Project: fizixer/cAST
    def run(self, params, lineno, colno):
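      # Expand a function-like macro call: bind each formal parameter (and
      # __VA_ARGS__ for a variadic macro) to its argument tokens, substitute
      # them into the macro body, and run the result back through the
      # preprocessor evaluator.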
      if len(params) != len(self.params) and self.params[-1] != '...':
        raise Exception('Error: too %s parameters to function %s: %s' % ('many' if len(params) > len(self.params) else 'few', self.name, ', '.join([str(x) for x in params])))
      paramValues = dict()
      for (index, param) in enumerate(self.params):
        if param == '...':
          if index != (len(self.params) - 1):
            raise Exception('Error: ellipsis must be the last parameter in parameter list')
          paramValues['__VA_ARGS__'] = []
          # Re-join the trailing (variadic) arguments into a single __VA_ARGS__
          # token list, re-inserting the comma tokens that separated them.
          for va_arg_rlist, next_arg in zip_longest(params[index:], params[index+1:]):
            paramValues['__VA_ARGS__'].extend(va_arg_rlist)
            if next_arg:
              paramValues['__VA_ARGS__'].append(cToken(self.cP.terminals['comma'], '<stream>', 'comma', ',', 0, 0, None))
        else:
          paramValues[param] = params[index]
      nodes = []
      if not self.body:
        return nodes
      for node in self.body.getAttr('tokens'):
        if node.terminal_str.lower() == 'identifier' and node.getString() in paramValues:
          val = paramValues[node.getString()]
          if isinstance(val, list):
            nodes.extend(deepcopy(val))
          else:
            nodes.append(copy(val))
        else:
          newNode = copy(node)
          nodes.append(newNode)
      nodes = self.cPE._eval(ppAst('ReplacementList', {'tokens': nodes}))
      for node in nodes:
        node.lineno = lineno
        node.colno = colno

      return nodes
Code Example #3
File: cLexer.py Project: fizixer/cAST
def parseComma( string, lineno, colno, terminalId, lexer ):
  token( string, lineno, colno, terminalId, lexer )
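  # A comma inside an active typedef declaration means the identifier seen just
  # before it names a new type; record it in the lexer's typedef table.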
  if (lexer.braceLevel, lexer.parenLevel) in lexer.typedefBlocks:
    tId = c_Parser.TERMINAL_TYPEDEF_IDENTIFIER
    if lexer.lastIdentifier:
      lexer.typedefs[lexer.lastIdentifier.source_string] = cToken(tId, lexer.resource, c_Parser.terminals[tId], lexer.lastIdentifier.source_string, lineno, colno, lexer.getContext())
    else:
      raise Exception('no last identifier')
Code Example #4
File: cLexer.py Project: fizixer/cAST
  def parse_parameter_list(self, tokenIterator):
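    # Collect the tokens of a (possibly nested) parameter list, prefixing each
    # parameter with an abstract- or named-parameter hint token for the parser.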
    param = []
    params = []
    hint = c_Parser.TERMINAL_ABSTRACT_PARAMETER_HINT
    startParenLevel = self.hint_parenLevel
    start = True
    while True:
      try:
        token = next(tokenIterator)
        self.update_hint_context(token)
      except StopIteration:
        break

      if start and token.id == c_Parser.TERMINAL_RPAREN:
        return [token]
      start = False

      if token.id == c_Parser.TERMINAL_LPAREN and \
         ( tokenIterator.check('+1', declaration_specifiers()) or \
           tokenIterator.check('+1', [c_Parser.TERMINAL_RPAREN]) ):
        param.append(token)
        param.extend(self.parse_parameter_list(tokenIterator))
        continue
      elif (token.id == c_Parser.TERMINAL_COMMA) or \
           (token.id == c_Parser.TERMINAL_RPAREN and self.hint_parenLevel == startParenLevel - 1):
        params.append(cToken(hint, self.resource, c_Parser.terminals[hint], '', param[0].lineno, param[0].colno, self.getContext()))
        params.extend(param)
        params.append(token)
        param = []
        hint = c_Parser.TERMINAL_ABSTRACT_PARAMETER_HINT
        if token.id == c_Parser.TERMINAL_RPAREN:
          break
        continue
      else:
        param.append(token)
        if token.id == c_Parser.TERMINAL_IDENTIFIER:
          hint = c_Parser.TERMINAL_NAMED_PARAMETER_HINT

    if len(param):
      params.append(cToken(hint, self.resource, c_Parser.terminals[hint], '', param[0].lineno, param[0].colno, self.getContext()))
      params.extend(param)
      params.append(token)

    return params
Code Example #5
File: cLexer.py Project: fizixer/cAST
def parseSemi( string, lineno, colno, terminalId, lexer ):
  token( string, lineno, colno, terminalId, lexer )
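  # A semicolon can trigger a pending synthetic ENDIF token (tracked by brace
  # level) and/or terminate a typedef declaration, in which case the last
  # identifier seen becomes a typedef name.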
  if (c_Parser.TERMINAL_SEMI, lexer.braceLevel,) in lexer.endifTokens:
    lexer.endifTokens = lexer.endifTokens.difference({(c_Parser.TERMINAL_SEMI, lexer.braceLevel)})
    token('', lineno, colno, c_Parser.TERMINAL_ENDIF, lexer)
  if (lexer.braceLevel, lexer.parenLevel) in lexer.typedefBlocks:
    lexer.typedefBlocks = lexer.typedefBlocks.difference({(lexer.braceLevel, lexer.parenLevel)})
    tId = c_Parser.TERMINAL_TYPEDEF_IDENTIFIER
    if lexer.lastIdentifier:
      lexer.typedefs[lexer.lastIdentifier.source_string] = cToken(tId, lexer.resource, c_Parser.terminals[tId], lexer.lastIdentifier.source_string, lineno, colno, lexer.getContext())
    else:
      raise Exception('no last identifier')
Code Example #6
File: cLexer.py Project: fizixer/cAST
def token(string, lineno, colno, terminalId, lexer):
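  # Build a cToken for the matched lexeme, append it to the lexer's output
  # stream, and return it.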
  matchedToken = cToken(terminalId, lexer.resource, c_Parser.terminals[terminalId], string, lineno, colno, lexer.getContext())
  lexer.addToken(matchedToken)
  return matchedToken
Code Example #7
File: cLexer.py Project: fizixer/cAST
  def __next__(self):
    # Pull the next token from the underlying lexer and rewrap it as a cToken
    # that carries the current parsing context.
    token = super().__next__()
    return cToken(token.id, self.resource, token.terminal_str, token.source_string, token.lineno, token.colno, context=self.getContext())
Code Example #8
File: cLexer.py Project: fizixer/cAST
  def parseExternalDeclaration(self, tokenIterator):
    # Scan external declarations, inserting hint tokens (external-declaration,
    # function-definition, function-prototype, declarator) for the parser;
    # returns as soon as a hint is determined or the token stream ends.
    ytokens = []
    xtokens = []
    self.lock = True
    self.keepGoing = True
    collectDeclarationSpecifiers = True

    while self.keepGoing:
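      # One pass per declaration; keepGoing is set again below when a comma
      # separates several declarators within the same declaration.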
      self.keepGoing = parseParams = funcFound = rparenFound = identFound = parametersParsed = False
      hintId = False
      ztokens = []
      declarationSpecifiers = []

      while True:
        try:
          token2 = next(tokenIterator)
        except StopIteration:
          break

        self.update_hint_context(token2)

        if collectDeclarationSpecifiers:
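          # Collect declaration specifiers; struct/union bodies are consumed
          # whole, and collection stops once the lookahead can no longer start
          # a declaration specifier.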

          if self.hint_braceLevel in self.hint_structDecl:
            declarationSpecifiers.append(token2)

            if parseParams and token2.id == c_Parser.TERMINAL_LPAREN and \
               ( tokenIterator.check('+1', declaration_specifiers()) or \
                 tokenIterator.check('+1', [c_Parser.TERMINAL_RPAREN, c_Parser.TERMINAL_IDENTIFIER]) ):
              paramTokens = self.parse_parameters(tokenIterator)
              declarationSpecifiers.extend(paramTokens)
              parseParams = False
            if token2.id == c_Parser.TERMINAL_RBRACE:
              self.hint_structDecl = self.hint_structDecl.difference({self.hint_braceLevel})
            if token2.id == c_Parser.TERMINAL_IDENTIFIER and self.hint_parenLevel > 0:
              parseParams = True
              continue
            if len(self.hint_structDecl) == 0:
              collectDeclarationSpecifiers = False
            continue

          elif token2.id in {c_Parser.TERMINAL_STRUCT, c_Parser.TERMINAL_UNION}:
            declarationSpecifiers.append(token2)
            while True:
              try:
                n = next(tokenIterator)
                self.update_hint_context(n)
                declarationSpecifiers.append(n)
                if n.id == c_Parser.TERMINAL_LBRACE:
                  break
              except StopIteration:
                break
            self.hint_structDecl = self.hint_structDecl.union({self.hint_braceLevel})
            continue
          else:
            declarationSpecifiers.append(token2)
          if not tokenIterator.check('+1', declaration_specifiers()):
            collectDeclarationSpecifiers = False
          continue

        ztokens.append(token2)

        # Declarator section: spot the declared identifier, parse any parameter
        # list, and decide between function-definition, function-prototype, and
        # plain declarator hints.
        if self.hint_braceLevel == 0 and \
           token2.id == c_Parser.TERMINAL_IDENTIFIER and \
           (self.hint_parenLevel > 0 or tokenIterator.check('+1', [c_Parser.TERMINAL_LPAREN])):
          parseParams = True
          if tokenIterator.check('+1', [c_Parser.TERMINAL_LPAREN]):
            funcFound = True
          continue

        if parseParams and token2.id == c_Parser.TERMINAL_LPAREN and \
           ( tokenIterator.check('+1', declaration_specifiers()) or \
             tokenIterator.check('+1', [c_Parser.TERMINAL_RPAREN, c_Parser.TERMINAL_IDENTIFIER]) ):
          paramTokens = self.parse_parameters(tokenIterator)
          ztokens.extend(paramTokens)

          if tokenIterator.check('+1', [c_Parser.TERMINAL_LBRACE]):
            hintId = c_Parser.TERMINAL_FUNCTION_DEFINITION_HINT
          elif tokenIterator.check('+1', declaration_specifiers()):
            hintId = c_Parser.TERMINAL_FUNCTION_DEFINITION_HINT
            ztokens.extend( self.parse_until(tokenIterator, c_Parser.TERMINAL_LBRACE) )

          if funcFound and hintId:
            break
          continue

        if token2.id in [c_Parser.TERMINAL_SEMI, c_Parser.TERMINAL_COMMA]:
          if self.hint_braceLevel == 0 and self.hint_parenLevel == 0:
            if funcFound:
              hintId = c_Parser.TERMINAL_FUNCTION_PROTOTYPE_HINT
            else:
              hintId = c_Parser.TERMINAL_DECLARATOR_HINT

            if token2.id == c_Parser.TERMINAL_COMMA:
              self.keepGoing = True
            break

      ytokens.extend(declarationSpecifiers)
      if hintId != False:
        first = declarationSpecifiers[0] if len(declarationSpecifiers) else ztokens[0]
        hint = cToken(hintId, self.resource, c_Parser.terminals[hintId], '', first.lineno, first.colno, self.getContext())
        ytokens.append(hint)
      ytokens.extend(ztokens)
    # endwhile


    first = ytokens[0] if len(ytokens) else ztokens[0]
    edHintId = c_Parser.TERMINAL_EXTERNAL_DECLARATION_HINT
    edHint = cToken(edHintId, self.resource, c_Parser.terminals[edHintId], '', first.lineno, first.colno, self.getContext())
    xtokens.append(edHint)
    xtokens.extend(ytokens)
    self.hint_lock = False
    return xtokens
Code Example #9
File: cLexer.py Project: fizixer/cAST
def parseLabelIdentifier( string, lineno, colno, terminalId, lexer ):
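  # Emit a label hint token immediately before the identifier so the parser can
  # distinguish labels from ordinary identifiers.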
  hintId = c_Parser.TERMINAL_LABEL_HINT
  ctx = lexer.getContext()
  lexer.addToken(cToken(hintId, lexer.resource, c_Parser.terminals[hintId], '', lineno, colno, context=ctx))
  lexer.addToken(cToken(terminalId, lexer.resource, c_Parser.terminals[terminalId], string, lineno, colno, context=ctx))