Example #1
    def E(self, token):
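        # Expression rule E: parse an operand with L(), then fold an optional
        # OR operator into a NODE_OR whose children are both operands;
        # otherwise push the token back and return the single operand.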

        node1 = self.L(token)

        if node1 is None:
            return None

        token = self.lexical.nextToken()

        if token is None:
            return node1

        tokensToCheck = {
            categories_const.TOKEN_OR: nodes_const.NODE_OR,
        }

        if token.category in tokensToCheck:
            nodeType = tokensToCheck[token.category]
            next_token = self.lexical.nextToken()

            if next_token is None:
                raise CompileException('%s : 2nd part missing' % (nodeType.name), token=token)

            node2 = self.E(next_token)

            if node2 is None:
                raise CompileException(
                    '%s : wrong 2nd part, (next_token: %s)' % (nodeType.name, str(next_token)), token=token)

            self.size += 1
            return Node(nodeType, [node1, node2], token=token)

        self.lexical.undo()
        return node1
Example #2
    def F(self, token):
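        # Term rule F: parse a primary with P(), then fold an optional
        # multiplication, division or modulo into a binary node.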
        node1 = self.P(token)

        if node1 is None:
            return None

        token = self.lexical.nextToken()

        if token is None:
            return node1

        tokensToCheck = {
            categories_const.TOKEN_MULTIPLICATION: nodes_const.NODE_MULT,
            categories_const.TOKEN_DIVIDER: nodes_const.NODE_DIV,
            categories_const.TOKEN_MODULO: nodes_const.NODE_MOD
        }

        if token.category in tokensToCheck:
            nodeType = tokensToCheck[token.category]
            next_token = self.lexical.nextToken()

            if next_token is None:
                raise CompileException('%s : 2nd part missing' % (nodeType.name), token=token)

            node2 = self.F(next_token)

            if node2 is None:
                raise CompileException(
                    '%s : wrong 2nd part, (next_token: %s)' % (nodeType.name, str(next_token)), token=token)

            self.size += 1
            return Node(nodeType, [node1, node2], token=token)

        self.lexical.undo()
        return node1
Example #3
    def semSymbol(self, node):
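        # Symbol pass: open a scope per block, allocate a slot for each
        # declaration, and resolve references, assignments, indirections,
        # indexes and calls to the slot of their declaring symbol.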
        if node.type == syntax.nodes_const.NODE_BLOCK:
            self.tableSymbol.startBlock()
            for child in node.children:
                self.semSymbol(child)
            self.tableSymbol.endBlock()

        elif node.type == syntax.nodes_const.NODE_VAR_DECL:
            symbol = self.tableSymbol.newSymbol(node)
            symbol.slot = self.nextNbVar()
            node.slot = symbol.slot

        elif node.type in [
                syntax.nodes_const.NODE_VAR_REF,
                syntax.nodes_const.NODE_AFFECTATION,
                syntax.nodes_const.NODE_INDIRECTION,
                syntax.nodes_const.NODE_INDEX,
                syntax.nodes_const.NODE_FUNC_CALL
        ]:
            try:
                symbol = self.tableSymbol.getSymbol(node)
            except Exception:
                raise CompileException("Using unknown reference", node.token)
            node.slot = symbol.slot
            for child in node.children:
                self.semSymbol(child)

        else:
            for child in node.children:
                self.semSymbol(child)
Example #4
    def semLoop(self, node):
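        # Semantic check: break/continue are only legal inside a loop, so the
        # walk stops at any NODE_LOOP and rejects them anywhere else.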
        if node.type == nodes_const.NODE_LOOP:
            return
        if node.type == nodes_const.NODE_CONTINUE or node.type == nodes_const.NODE_BREAK:
            raise CompileException(
                "Semantic exception : %s not inside a loop" % node.type.name,
                node.token)

        for c in node.children:
            self.semLoop(c)
Example #5
    def C(self, token):
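        # Comparison rule C: parse an operand with T(), then fold an optional
        # comparison operator (equals, not-equals, lower/greater, with or
        # without equals) into a binary comparison node.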

        node1 = self.T(token)

        if node1 is None:
            return None

        token = self.lexical.nextToken()

        if token is None:
            return node1

        tokensToCheck = {
            categories_const.TOKEN_EQUALS: nodes_const.NODE_EQUALS,
            categories_const.TOKEN_NOT_EQUALS: nodes_const.NODE_NOT_EQUALS,
            categories_const.TOKEN_LOWER_THAN: nodes_const.NODE_LOWER_THAN,
            categories_const.TOKEN_LOWER_EQUALS_THAN: nodes_const.NODE_LOWER_EQUALS,
            categories_const.TOKEN_GREATER_THAN: nodes_const.NODE_GREATER_THAN,
            categories_const.TOKEN_GREATER_EQUALS_THAN: nodes_const.NODE_GREATER_EQUALS,
        }

        if token.category in tokensToCheck:
            nodeType = tokensToCheck[token.category]
            next_token = self.lexical.nextToken()

            if next_token is None:
                raise CompileException('%s : 2nd part missing' % (nodeType.name), token=token)

            node2 = self.C(next_token)

            if node2 is None:
                raise CompileException(
                    '%s : wrong 2nd part, (next_token: %s)' % (nodeType.name, str(next_token)), token=token)

            self.size += 1
            return Node(nodeType, [node1, node2], token=token)

        self.lexical.undo()
        return node1
Example #6
    def D(self, token):
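        # Function definition D: int IDENT '(' [ int IDENT { ',' int IDENT } ] ')' S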
        if token.category != categories_const.TOKEN_INT:
            raise CompileException("Function : Missing int", token=token)

        nextTokenIdent = self.lexical.nextToken()
        if nextTokenIdent is None or nextTokenIdent.category != categories_const.TOKEN_IDENT:
            raise CompileException("Function : Missing function name", token=nextTokenIdent)

        nextToken = self.lexical.nextToken()
        if nextToken is None or nextToken.category != categories_const.TOKEN_PARENTHESIS_OPEN:
            raise CompileException("Function : Missing opening parenthesis", token=nextToken)

        nextToken = self.lexical.nextToken()

        params = []

        while nextToken is not None and nextToken.category == categories_const.TOKEN_INT:
            nextToken = self.lexical.nextToken()
            if nextToken is None or nextToken.category != categories_const.TOKEN_IDENT:
                raise CompileException("Function : Missing params name", token=nextToken)
            params.append(nextToken.identifier)

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException("Function : parameter list not finished", token=token)
            if nextToken.category != categories_const.TOKEN_COMMA:
                break

            nextToken = self.lexical.nextToken()

        if nextToken is None or nextToken.category != categories_const.TOKEN_PARENTHESIS_CLOSE:
            raise CompileException("Function : Missing closing parenthesis", token=nextToken)

        nextToken = self.lexical.nextToken()
        if nextToken is None:
            raise CompileException("Function : Missing function body", token=token)

        nodeS = self.S(nextToken)
        if nodeS is None:
            raise CompileException("Function : Missing function body", token=nextToken)

        return Node(type=nodes_const.NODE_FUNC, children=[nodeS], params=params, identifier=nextTokenIdent.identifier, token=token)
Example #7
    def semFunction(self, node):
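        # Function pass: walk every function under NODE_PROGRAM; for a NODE_FUNC,
        # register its symbol, allocate a slot per parameter, analyse the body in
        # its own scope and record nbLocal (slots used minus parameters).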
        if node.type == nodes_const.NODE_PROGRAM:
            for func in node.children:
                self.semFunction(func)
            return
        if node.type == nodes_const.NODE_FUNC:
            self.tableSymbol.newSymbol(node)  # avoids two functions with the same name
            self.nbVar = 0
            self.tableSymbol.startBlock()

            for param in node.params:
                symbolParam = self.tableSymbol.newSymbolIdent(param, node=None)
                symbolParam.slot = self.nextNbVar()

            self.analyseSemntique(node.children[0])

            self.tableSymbol.endBlock()
            node.nbLocal = self.nbVar - len(node.params)

            return

        raise CompileException(
            "Semantic exception : expected a function definition", node.token)
Example #8
def tokenize(input_str, nbLineRuntime=0):
    """
    :param input_str: input string to tokenize
    :return: list of tokens
    """
    charIndex = 0
    line = 1 - nbLineRuntime
    column = 1

    nbChar = len(input_str)
    tokens = []
    while charIndex < nbChar:
        c = input_str[charIndex]

        if ord(c) == 13:  # 13 = \r
            column = 1
            charIndex += 1
            continue
        elif ord(c) == 10:  # 10 = '\n'
            line += 1
            column = 1
            charIndex += 1
            continue
        elif str.isspace(c):
            charIndex += 1
            column += 1
            continue

        if str.isalpha(c):

            charIndexEnd = charIndex
            while charIndexEnd < nbChar and str.isalnum(input_str[charIndexEnd]):
                charIndexEnd += 1

            currentSymbol = input_str[charIndex:charIndexEnd]

            if currentSymbol in categories_const.MAP_TOKENS:
                tokens.append(Token(categories_const.MAP_TOKENS[currentSymbol], line, column))
            else:
                tokens.append(
                    Token(categories_const.TOKEN_IDENT, line, column, identifier=currentSymbol))

            column += (charIndexEnd - charIndex)
            charIndex = charIndexEnd
            continue
        elif str.isdigit(c):
            charIndexEnd = charIndex

            while charIndexEnd < nbChar and str.isdigit(input_str[charIndexEnd]):
                charIndexEnd += 1

            currentSymbol = input_str[charIndex:charIndexEnd]
            tokens.append(Token(categories_const.TOKEN_VALUE, line, column, value=int(currentSymbol)))

            column += (charIndexEnd - charIndex)
            charIndex = charIndexEnd
            continue
        else:

            if c in categories_const.MAP_TOKENS:
                category = categories_const.MAP_TOKENS[c]
                if category in categories_const.TOKEN_MULTI_CHARS:

                    if len(input_str) > charIndex + 1:
                        currentSymbol = input_str[charIndex:charIndex+2]
                        if currentSymbol in categories_const.MAP_TOKENS:
                            category = categories_const.MAP_TOKENS[currentSymbol]
                            tokens.append(Token(category, line, column))
                            charIndex += 2
                            column += 2
                            continue

                if category == categories_const.TOKEN_COMMENT:
                    # skip the rest of the line (up to '\n' or end of input)
                    while charIndex < nbChar and ord(input_str[charIndex]) != 10:
                        charIndex += 1
                    continue


                tokens.append(Token(category, line, column))

            elif c in categories_const.TOKEN_UNIQUE_MULTI_CHARS:
                # greedily extend the symbol until it matches a known multi-char token
                charOffset = 1
                while len(input_str) > charIndex + charOffset:
                    currentSymbol = input_str[charIndex:charIndex + (charOffset + 1)]

                    if currentSymbol in categories_const.MAP_TOKENS:
                        category = categories_const.MAP_TOKENS[currentSymbol]
                        tokens.append(Token(category, line, column))
                        charIndex += charOffset
                        column += charOffset
                        break

                    charOffset += 1
            else:
                raise CompileException("Tokenizer : unsupported character : " + c, line=line)

        charIndex += 1
        column += 1
    return tokens
Example #9
    def A(self, token):
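        # Assignment statement A: pointer assignment (*ident = E), indexed
        # assignment (ident[E] = E) or plain assignment (ident = E).
        # Returns None when the tokens do not form an assignment.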

        # pointer assignment: '*' ident '=' E
        if token.category == categories_const.TOKEN_MULTIPLICATION:
            tokenPtn = token
            tokenIdent = self.lexical.nextToken()

            if tokenIdent is None or tokenIdent.category != categories_const.TOKEN_IDENT:
                raise CompileException("Pointer : missing identifier", token=tokenPtn)

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException("Pointer : nothing after *identifier", token=token)

            if nextToken.category == categories_const.TOKEN_SEMICOLON:
                self.lexical.undo(tokenPtn)
                return None

            if nextToken.category != categories_const.TOKEN_AFFECT:
                raise CompileException("Pointer : missing assignment operator", token=tokenPtn)

            nodeE = self.E(self.lexical.nextToken())

            if nodeE is None:
                raise CompileException("Pointer : missing expression", token=tokenPtn)

            self.size += 1
            return Node(nodes_const.NODE_INDIRECTION, [nodeE], identifier=tokenIdent.identifier, token=token)

        # Not a pointer assignment: expect ident '=' E or ident '[' E ']' '=' E.
        tokenIden = token

        nextToken = self.lexical.nextToken()
        if nextToken is None:
            raise CompileException("Missing opening bracket", token=token)

        # ident '[' E ']' '=' E  ->  indexed assignment
        if nextToken.category == categories_const.TOKEN_SQUARE_BRACKET_OPEN:

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException("Index : Missing expression", token=token)

            nodeE = self.E(nextToken)

            if nodeE is None:
                raise CompileException("Index : Missing expression", token=token)

            nextToken = self.lexical.nextToken()
            if nextToken is None or nextToken.category != categories_const.TOKEN_SQUARE_BRACKET_CLOSE:
                raise CompileException("Index : Missing closing square bracket", token=token)

            nextToken = self.lexical.nextToken()

            if nextToken is None or nextToken.category != categories_const.TOKEN_AFFECT:
                raise CompileException("Index : Missing assignment operator", token=token)

            tokenExpression = self.lexical.nextToken()

            if tokenExpression is None:
                raise CompileException("Index : Missing expression after equals", token=token)

            nodeAfterExpression = self.E(tokenExpression)

            if nodeAfterExpression is None:
                raise CompileException("Index : incorrect expression after equals", token=token)

            self.size += 1
            return Node(nodes_const.NODE_INDEX, [nodeE, nodeAfterExpression], identifier=tokenIden.identifier, token=token)

        # ident '=' E  ->  plain assignment; anything else is not an A statement.
        if nextToken.category != categories_const.TOKEN_AFFECT:
            self.lexical.undo()
            return None

        tokenExpression = self.lexical.nextToken()

        if tokenExpression is None:
            raise CompileException("Affectation : Missing expression after equals", token=token)

        nodeAfterExpression = self.E(tokenExpression)

        if nodeAfterExpression is None:
            raise CompileException("Affectation : incorrect expression after equals", token=token)

        self.size += 1
        return Node(nodes_const.NODE_AFFECTATION, [nodeAfterExpression], identifier=tokenIden.identifier, token=token)
Example #10
    def P(self, token):
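        # Primary rule P: constants, variable reads, indexed reads, function
        # calls, parenthesised expressions, unary minus, logical not and
        # pointer dereference. Returns None if the token starts none of these.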
        if token.category == categories_const.TOKEN_VALUE:
            self.size += 1
            return Node(nodes_const.NODE_CONSTANT, value=token.value, token=token)

        if token.category == categories_const.TOKEN_IDENT:
            self.size += 1

            next_token = self.lexical.nextToken()
            if next_token is None:
                raise CompileException('Identifier with nothing after', token=token)

            if next_token.category == categories_const.TOKEN_SQUARE_BRACKET_OPEN:
                next_token = self.lexical.nextToken()
                if next_token is None:
                    raise CompileException("Index read : Missing expression", token=token)

                nodeE = self.E(next_token)

                if nodeE is None:
                    raise CompileException("Index read : Invalid expression", token=token)

                next_token = self.lexical.nextToken()
                if next_token is None:
                    raise CompileException('Index read : Missing end of index', token=token)
                if next_token.category != categories_const.TOKEN_SQUARE_BRACKET_CLOSE:
                    raise CompileException("Index read : Missing closing bracket", token=token)

                self.size += 1
                return Node(nodes_const.NODE_INDEX, [nodeE], identifier=token.identifier, token=token)


            # function call: ident '(' [ E { ',' E } ] ')'
            if next_token.category == categories_const.TOKEN_PARENTHESIS_OPEN:
                list_param = []

                while not self.lexical.isEnd():
                    next_token = self.lexical.nextToken()
                    if next_token is None:
                        raise CompileException('function call : argument list not finished', token=token)
                    if next_token.category == categories_const.TOKEN_PARENTHESIS_CLOSE:
                        self.size += 1
                        return Node(nodes_const.NODE_FUNC_CALL, identifier=token.identifier, children=list_param, token=token)
                    if next_token.category == categories_const.TOKEN_COMMA:
                        continue

                    next_node = self.E(next_token)
                    if next_node is None:
                        raise CompileException('function call : invalid parameter', token=next_token)

                    list_param.append(next_node)

                    # no closing parenthesis: fall through and return None at the end of the function

            else:
                self.lexical.undo()
                return Node(nodes_const.NODE_VAR_REF, identifier=token.identifier, token=token)

        if token.category == categories_const.TOKEN_PARENTHESIS_OPEN:
            # parenthesised expression: '(' E ')'
            node = self.E(self.lexical.nextToken())
            nxtToken = self.lexical.nextToken()
            if nxtToken is None:
                raise CompileException('Parenthesis : expression not closed', token=token)
            if nxtToken.category == categories_const.TOKEN_PARENTHESIS_CLOSE:
                return node

        if token.category == categories_const.TOKEN_MINUS:
            node = self.P(self.lexical.nextToken())

            if node is None:
                raise CompileException('NEGATIVE : 2nd part missing', token=token)

            self.size += 1
            return Node(nodes_const.NODE_UNITARY_MINUS, [node], token=token)

        if token.category == categories_const.TOKEN_NOT:
            node = self.P(self.lexical.nextToken())

            if node is None:
                raise CompileException('NOT : 2nd part missing', token=token)

            self.size += 1
            return Node(nodes_const.NODE_NOT, [node], token=token)

        if token.category == categories_const.TOKEN_MULTIPLICATION:
            # pointer dereference: '*' P
            node = self.P(self.lexical.nextToken())

            if node is None:
                raise CompileException('INDIRECTION : 2nd part missing', token=token)

            self.size += 1
            return Node(nodes_const.NODE_INDIRECTION, [], identifier=node.identifier, token=token)

        return None
Example #11
    def S(self, token):
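        # Statement rule S: blocks, assignments, expression statements, out,
        # if/else, while, do-while, for, break, continue, return and int
        # declarations. while/do-while/for are desugared into LOOP/IF nodes.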

        if token.category == categories_const.TOKEN_CURLY_BRACKET_OPEN:
            # '{' S* '}'
            nextToken = self.lexical.nextToken()

            nodesBlockChildren = []

            while nextToken is not None and nextToken.category != categories_const.TOKEN_CURLY_BRACKET_CLOSE:
                nodeS = self.S(nextToken)

                if nodeS is not None:
                    nodesBlockChildren.append(nodeS)
                nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('Block : Bracket not closed', token=token)

            nodeBlock = Node(nodes_const.NODE_BLOCK, nodesBlockChildren, token=token)
            self.size += 1
            return nodeBlock

        # A : assignment statement

        nodeA = self.A(token)

        if nodeA is not None:
            nextTokenAfterA = self.lexical.nextToken()
            if nextTokenAfterA is None:
                raise CompileException('Affectation : Missing semicolon', token=token)

            if nextTokenAfterA.category == categories_const.TOKEN_SEMICOLON:
                return nodeA

            raise CompileException("Affectation: Missing semicolon", token=token)

        # E : bare expression statement (its value is dropped)

        nodeE = self.E(token)

        if nodeE is not None:
            nextTokenAfterE = self.lexical.nextToken()
            if nextTokenAfterE is None:
                raise CompileException('Expression : missing semicolon', token=token)

            if nextTokenAfterE.category == categories_const.TOKEN_SEMICOLON:
                self.size += 1
                return Node(nodes_const.NODE_DROP, [nodeE], token=token)

            raise CompileException("Expression: Missing semicolon", token=token)

        # out : print an expression

        if token.category == categories_const.TOKEN_OUT:
            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException("Out: Missing expression to print", token=token)

            nodeExpression = self.E(nextToken)

            if nodeExpression is None:
                raise CompileException("Out: invalid expression to print (%s)" % str(nextToken), token=token)

            nextTokenAfterE = self.lexical.nextToken()
            if nextTokenAfterE is None:
                raise CompileException('Out: Missing semicolon', token=token)

            if nextTokenAfterE.category == categories_const.TOKEN_SEMICOLON:
                self.size += 1
                return Node(nodes_const.NODE_OUT, [nodeExpression], token=token)

            raise CompileException("Out: Missing semicolon", token=token)


        # if '(' E ')' S [ else S ]

        if token.category == categories_const.TOKEN_IF:

            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('IF : not finished statement', token=token)

            if nextToken.category != categories_const.TOKEN_PARENTHESIS_OPEN:
                raise CompileException('IF : Missing opening parenthesis for condition', token=token)

            nodeCondition = self.E(nextToken)

            if nodeCondition is None:
                raise CompileException('IF : Missing condition', token=token)

            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('IF : Missing statement', token=token)

            nodeS1 = self.S(nextToken)
            if nodeS1 is None:
                raise CompileException('IF : Missing statement', token=token)

            tokenElse = self.lexical.nextToken()
            if tokenElse is None or tokenElse.category != categories_const.TOKEN_ELSE:
                self.lexical.undo()
                self.size += 1
                return Node(nodes_const.NODE_IF, [nodeCondition, nodeS1], token=token)

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException('Else : Missing statement', token=token)
            nodeS2 = self.S(nextToken)
            if nodeS2 is None:
                raise CompileException('Else : Missing statement', token=token)
            self.size += 1
            return Node(nodes_const.NODE_IF, [nodeCondition, nodeS1, nodeS2], token=token)

        # while '(' E ')' S  ->  desugared into LOOP { IF(E, S, BREAK) }

        if token.category == categories_const.TOKEN_WHILE:
            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('WHILE : not finished statement', token=token)

            if nextToken.category != categories_const.TOKEN_PARENTHESIS_OPEN:
                raise CompileException('WHILE : Missing opening parenthesis for condition', token=token)

            nodeCondition = self.E(nextToken)

            if nodeCondition is None:
                raise CompileException('WHILE : Missing condition', token=token)

            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('WHILE : Missing statement', token=token)

            nodeS = self.S(nextToken)
            if nodeS is None:
                raise CompileException('WHILE : Missing statement', token=token)

            self.size += 3
            nodeIf = Node(nodes_const.NODE_IF, children=[nodeCondition, nodeS, Node(nodes_const.NODE_BREAK, token=token)], token=token)
            return Node(nodes_const.NODE_LOOP, children=[nodeIf], token=token)

        # do S while '(' E ')'  ->  desugared into LOOP { BLOCK(S, IF(E, CONTINUE, BREAK)) }

        if token.category == categories_const.TOKEN_DO:
            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('DO-WHILE : not finished statement', token=token)


            nodeS = self.S(nextToken)

            if nodeS is None:
                raise CompileException('DO-WHILE : invalid block', token=token)

            nextToken = self.lexical.nextToken()
            if nextToken is None or nextToken.category != categories_const.TOKEN_WHILE:
                raise CompileException('DO-WHILE : Missing While', token=token)

            nextToken = self.lexical.nextToken()
            if nextToken is None or nextToken.category != categories_const.TOKEN_PARENTHESIS_OPEN:
                raise CompileException('DO-WHILE : Missing opening parenthesis for condition', token=token)

            nodeCondition = self.E(nextToken)

            if nodeCondition is None:
                raise CompileException('DO-WHILE : Missing condition', token=token)


            self.size += 5
            nodeIf = Node(nodes_const.NODE_IF, children=[nodeCondition, Node(nodes_const.NODE_CONTINUE, token=token), Node(nodes_const.NODE_BREAK, token=token)], token=token)

            nodeBlock = Node(nodes_const.NODE_BLOCK, children=[nodeS, nodeIf], token=token)
            nodeLoop = Node(nodes_const.NODE_LOOP, children=[nodeBlock], token=token)
            return nodeLoop

        # for '(' A ';' E ';' A ')' S  ->  desugared into BLOCK(init, LOOP { increment, IF(E, S, BREAK) })

        if token.category == categories_const.TOKEN_FOR:
            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('FOR : not finished statement', token=token)

            if nextToken.category != categories_const.TOKEN_PARENTHESIS_OPEN:
                raise CompileException('FOR : Missing opening parenthesis for params', token=token)

            # init assignment

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException('FOR : Missing init affectation statement', token=token)

            nodeAffectation = self.A(nextToken)
            if nodeAffectation is None:
                raise CompileException('FOR : invalid init affectation statement', token=token)

            # end of init assignment

            nextToken = self.lexical.nextToken()
            if nextToken is None or nextToken.category != categories_const.TOKEN_SEMICOLON:
                raise CompileException('FOR : Missing semicolon between the init assignment and the condition', token=token)

            # loop condition

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException('FOR : Missing init condition statement', token=token)

            nodeCondition = self.E(nextToken)
            if nodeCondition is None:
                raise CompileException('FOR : invalid init condition statement', token=token)

            # end of condition

            nextToken = self.lexical.nextToken()
            if nextToken is None or nextToken.category != categories_const.TOKEN_SEMICOLON:
                raise CompileException(
                    'FOR : Missing semicolon between the condition and the increment', token=token)

            # increment

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException('FOR : Missing init incrementation statement', token=token)

            nodeIncrementation = self.A(nextToken)
            if nodeIncrementation is None:
                raise CompileException('FOR : invalid init incrementation statement', token=token)

            # end of increment

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException('FOR : not finished statement', token=token)

            if nextToken.category != categories_const.TOKEN_PARENTHESIS_CLOSE:
                raise CompileException('FOR : Missing closing parenthesis for params', token=token)

            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('FOR : missing block', token=token)
            nodeS = self.S(nextToken)
            if nodeS is None:
                raise CompileException('FOR : invalid block', token=token)

            self.size += 4

            nodeIf = Node(nodes_const.NODE_IF, children=[nodeCondition, nodeS, Node(nodes_const.NODE_BREAK, token=token)], token=token)
            nodeLoop = Node(nodes_const.NODE_LOOP, children=[nodeIncrementation, nodeIf], token=token)
            nodeBlock = Node(nodes_const.NODE_BLOCK, children=[nodeAffectation, nodeLoop], token=token)

            return nodeBlock

        # break

        if token.category == categories_const.TOKEN_BREAK:
            self.size += 1
            self.lexical.nextToken()  # consume the trailing semicolon (not verified)
            return Node(nodes_const.NODE_BREAK, token=token)

        # continue

        if token.category == categories_const.TOKEN_CONTINUE:
            self.size += 1
            self.lexical.nextToken()  # consume the trailing semicolon (not verified)
            return Node(nodes_const.NODE_CONTINUE, token=token)

        # return [ E ] ';'

        if token.category == categories_const.TOKEN_RETURN:

            nextToken = self.lexical.nextToken()
            if nextToken is None:
                raise CompileException("return: Missing return value", token=token)

            children = []

            # With return value
            if nextToken.category != categories_const.TOKEN_SEMICOLON:
                nodeE = self.E(nextToken)
                if nodeE is None:
                    raise CompileException("return: Invalid return value", token=token)
                children.append(nodeE)
                nextToken = self.lexical.nextToken()

            if nextToken is None or nextToken.category != categories_const.TOKEN_SEMICOLON:
                raise CompileException("return: Missing semicolon", token=token)

            self.size += 1
            return Node(nodes_const.NODE_RETURN, children=children, token=token)

        # declaration : 'int' IDENT ';'

        if token.category == categories_const.TOKEN_INT:

            nextToken = self.lexical.nextToken()

            if nextToken is None:
                raise CompileException('DECLARATION : incomplete statement', token=token)

            if nextToken.category != categories_const.TOKEN_IDENT:
                raise CompileException('DECLARATION : Missing identifier', token=token)

            nextTokenAfterIdent = self.lexical.nextToken()

            if nextTokenAfterIdent is None or nextTokenAfterIdent.category != categories_const.TOKEN_SEMICOLON:
                raise CompileException('DECLARATION : Missing semicolon', token=token)

            self.size += 1
            return Node(nodes_const.NODE_VAR_DECL, children=[], identifier=nextToken.identifier, token=token)