def convertTokenArrayToDAst(out_childArray, in_arrayToken, in_fileWrapper, in_environment):
    """Walk a token array and append the resulting DAst nodes to out_childArray.

    A cursor is advanced over in_arrayToken; at each step comment tokens are
    skipped (collected into the environment) and the semantic jobs are given a
    chance to consume tokens and emit nodes. Any comments still pending in the
    environment after the walk are flushed as a trailing Comment node.
    """
    tokenCursor = Cursor(in_arrayToken)
    while tokenCursor.isValid():
        skipComments(tokenCursor, in_environment)
        runSemanticJobs(out_childArray, tokenCursor, in_fileWrapper, in_environment)

    # Flush any comments gathered during the walk that no node claimed.
    pendingComments = in_environment.getCommentArray()
    if pendingComments:
        trailingNode = dast.DAst(
            dast.DAstEnum.Comment,
            in_environment.consumeCommentArray()
        )
        out_childArray.append(trailingNode)
def jobIf(out_childArray, in_cursor, in_fileWrapper, in_environment):
    """Semantic job: consume a preprocessor "#if" directive.

    If the cursor is positioned on a Preprocessor token whose value is "#if",
    consume the directive and its condition token, recursively convert the
    tokens up to the matching preprocessor scope close into child nodes, and
    append a PreprocessorIfdef DAst node carrying the condition value, the
    child nodes, and any pending comments. Otherwise the cursor is untouched.

    NOTE(review): the node enum is PreprocessorIfdef even though the directive
    matched is "#if" — presumably the two are folded together; confirm.
    """
    item = in_cursor.peek(0)
    if item and item.getTokenEnum() == tokeniser.TokenEnum.Preprocessor and item.getValue() == "#if":
        in_cursor.consume()  # the "#if" directive itself
        token = in_cursor.consume()  # the condition token following "#if"
        newChildArray = []
        # BUGFIX: the original had a stray ":" after this call, which is a
        # syntax error — it is a plain function call, not a compound statement.
        convertTokenArrayToDAstTillPreprocessorScopeClose(newChildArray, in_cursor, in_fileWrapper, in_environment)
        out_childArray.append(
            dast.DAst(
                dast.DAstEnum.PreprocessorIfdef,
                token.getValue(),
                newChildArray,
                in_environment.consumeCommentArray()
            )
        )
def jobInclude(out_childArray, in_cursor, in_fileWrapper, in_environment):
    """Semantic job: consume a preprocessor "#include" directive.

    If the cursor is positioned on a Preprocessor token whose value is
    "#include", consume the directive and the following include-target token,
    and append a PreprocessorInclude DAst node carrying the target's value and
    any pending comments. Otherwise the cursor is untouched.
    """
    item = in_cursor.peek(0)
    if item and item.getTokenEnum() == tokeniser.TokenEnum.Preprocessor and item.getValue() == "#include":
        in_cursor.consume()  # the "#include" directive itself
        # BUGFIX: the original consumed only the directive and stored None as
        # the node value, which both lost the include target and left its
        # token in the stream to be re-processed by later jobs. Mirror jobIf:
        # consume the following token and record its value.
        token = in_cursor.consume()  # include target (path/header token) — TODO confirm tokeniser emits it separately
        out_childArray.append(
            dast.DAst(
                dast.DAstEnum.PreprocessorInclude,
                token.getValue() if token else None,
                [],
                in_environment.consumeCommentArray()
            )
        )