Example #1
 def handleSyntaxDelimeter(self, currentNode, lastNode):
     token_type = lastNode.getTokenType()
     if token_type == TokenType.T_INTEGRAL:
         current_token_type = currentNode.getTokenType()
         possibilities = LexicalBuilderRule.getNextExpectedNodeType(
             current_token_type, token_type, {'contextNode': lastNode})
         currentNode.setContextType(token_type)
         currentNode.setPotentitalTypes(possibilities)
     return currentNode
Example #2
 def handleVertical(self, node, verticalNode):
     if verticalNode.getTokenType() == TokenType.S_VERTICAL:
         builder = LexicalBuilderRule.getTypeRule(
             verticalNode.getTokenType())
         if builder.isFull(verticalNode):
             return self.parse(node, verticalNode)
         else:
             builder.consume(verticalNode, node)
         return verticalNode
Example #3
 def test_swapLeftRight(self):
     data = LexicalBuilderRule.multiplyRule()
     multiplyNode = Node(TokenType.T_MULT)
     subtractNode = Node(TokenType.T_MINUS)
     num = Node(TokenType.T_NUM, "4")
     num2 = Node(TokenType.T_NUM, "5")
     num3 = Node(TokenType.T_NUM, "6")
     subtractNode.addChild(LexicalRuleItem.K_LEFT, num)
     subtractNode.addChild(LexicalRuleItem.K_RIGHT, num2)
     data.swapLeftRight(subtractNode, multiplyNode)
Example #4
 def handleDelta(self, deltaNode, node):
     if deltaNode.getTokenType() == TokenType.T_DELTA:
         builder = LexicalBuilderRule.getTypeRule(deltaNode.getTokenType())
         if builder.isFull(deltaNode):
             return self.buildContextualMultiply(node, deltaNode)
         else:
             builder.consume(deltaNode, node)
         return deltaNode
     # returns the delta, so that we can add current context to the stack
     elif node.getTokenType() == TokenType.T_DELTA:
         return node
Example #5
 def handleExpression(self, currentNode, lastNode=None):
     if lastNode is None:
         raise Exception("Expression expects more info")
     if lastNode.getTokenType() in (TokenType.T_CONTEXT, TokenType.T_NUM):
         builder = LexicalBuilderRule.getTypeRule(currentNode.getTokenType())
         builder.consume(currentNode, lastNode)
         return currentNode
     else:
         raise Exception("unhandled type, handleExpression")
Example #6
 def handleNumber(self, currentNode, lastNode=None):
     if lastNode is None:
         return currentNode

     lastNodeIsSimple = self.tokenIsSimple(lastNode.getTokenType())

     if lastNodeIsSimple:
         builder = LexicalBuilderRule.getTypeRule(lastNode.getTokenType())
         builder.consume(lastNode, currentNode)
         return lastNode
     elif lastNode.getTokenType() in (
             TokenType.T_CONTEXT, TokenType.T_NUM, TokenType.T_VARIABLE):
         return self.buildContextualMultiply(currentNode, lastNode)
     elif lastNode.isExpression():
         return self.parse(lastNode, currentNode)
     elif lastNode.getTokenType() == TokenType.S_DELIMITER:
         # Assuming the delimiter will self-close.
         return currentNode
     else:
         raise Exception("Unexpected case: handleNumber")
Example #7
 def handleSimple(self, currentNode, inputNode):
     if inputNode is None:
         raise Exception("The simple functions need an input beforehand")
     else:
         builder = LexicalBuilderRule.getTypeRule(
             currentNode.getTokenType())
         swapped = False
         if currentNode.isOperator() and inputNode.isOperator():
             sortedNodes = builder.sortOperations([inputNode, currentNode])
             if sortedNodes[0] != inputNode:
                 builder.swapLeftRight(inputNode, currentNode)
                 swapped = True
         if not swapped:
             builder.consume(currentNode, inputNode)
         # if len(s_list) < 2:
         #     raise Exception(
         #         "Unexpected lack of enough tokens, lexicalbuilder.py")
         # nextchar = s_list[1]
         # nextTokenType = self.getTokenType(nextchar)
         # nextNode = Node(nextTokenType, nextchar)
         # self.parse(nextNode, nextTokenType, currentNode, s_list[1:])
     return currentNode
Example #8
 def lookBehind(self):
     builder = LexicalBuilderRule.getTypeRule(self.token_type, self)
     return builder.isNextRuleAfterSelf(self)
Example #9
 def lookAhead(self):
     builder = LexicalBuilderRule.getTypeRule(self.token_type, self)
     return builder.isNextRuleBeforeSelf(self)
Example #10
 def isClosing(self):
     if self.isSelfClosing():
         builder = LexicalBuilderRule.getTypeRule(self.token_type, self)
         return builder.isFull(self)
     return False
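Examples #8 through #10 are Node methods that all defer to the node's type rule. The sketch below is a hedged illustration of how a caller might use them together; the loop, the stack/finished lists, and the decision to keep a node open are assumptions, not part of the source.

 # Hedged caller sketch -- the loop and the bookkeeping here are assumptions.
 nodes = []               # placeholder: Node objects produced by an earlier scan
 stack, finished = [], []
 for node in nodes:
     if node.isClosing():
         # Self-closing and its rule reports full: nothing more to attach.
         finished.append(node)
     elif node.lookAhead() or node.lookBehind():
         # The node's rule still expects a neighbouring rule; keep it open.
         stack.append(node)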
Example #11
 def handleIntegral(self, currentNode, lastNode):
     builder = LexicalBuilderRule.getTypeRule(lastNode.getTokenType())
     builder.consume(currentNode, lastNode)
     return currentNode
Example #12
 def test_MultiplyRule(self):
     data = LexicalBuilderRule.multiplyRule()
     self.assertNotEqual(data, None, "multiply rule was none")
     self.assertEqual(data.expectedItemOrder[0], LexicalRuleItem.K_LEFT)
Example #13
 def test_sortOperations(self):
     data = LexicalBuilderRule.multiplyRule()
     multiplyNode = Node(TokenType.T_MULT)
     subtractNode = Node(TokenType.T_MINUS)
     order = data.sortOperations([subtractNode, multiplyNode])
     self.assertEqual(order[0], multiplyNode)
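The two multiply-rule tests above exercise sortOperations and swapLeftRight in isolation. The sketch below is not from the source: it strings those calls together with the precedence branch from Example #7. The import paths, and the assumption that LexicalRuleItem sits alongside LexicalBuilderRule, are guesses at the project layout.

 # Hedged sketch combining Examples #3, #7 and #13 -- imports are assumptions.
 from node import Node
 from token_type import TokenType
 from lexical_builder_rule import LexicalBuilderRule, LexicalRuleItem

 rule = LexicalBuilderRule.multiplyRule()

 subtract = Node(TokenType.T_MINUS)   # node already built (inputNode in Example #7)
 subtract.addChild(LexicalRuleItem.K_LEFT, Node(TokenType.T_NUM, "4"))
 subtract.addChild(LexicalRuleItem.K_RIGHT, Node(TokenType.T_NUM, "5"))
 multiply = Node(TokenType.T_MULT)    # incoming operator (currentNode in Example #7)

 # Example #13 asserts that multiplication sorts ahead of subtraction.
 ordered = rule.sortOperations([subtract, multiply])

 if ordered[0] != subtract:
     # Precedence mismatch: rearrange children, as in Examples #3 and #7.
     rule.swapLeftRight(subtract, multiply)
 else:
     # Otherwise the incoming operator simply consumes the existing node.
     rule.consume(multiply, subtract)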