Example #1
 def parseAssignment(self, tokens):
     # Split on the first '=': the left-hand side is the single token before it,
     # the right-hand side is parsed recursively; '=' as the first token is an error.
     for i in range(len(tokens)):
         if token('operator', '=') == tokens[i]:
             if i == 0:
                 raise ExpressionError("Assignment is not done this way")
             return tree('=', tokens[:i][0], self.parseAssignment(tokens[i+1:]))
     return self.parseParens(tokens)
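
The token, tree, and ExpressionError names used throughout these parser snippets are not defined on this page. The sketch below is only an assumption about what minimal stand-ins could look like, inferred from how the snippets use them: tokens carry a type and a value, compare equal both to other tokens and to bare value strings (see the tokenizer asserts further down), and subtree nodes report the type 'node' (see parsePower).

class ExpressionError(Exception):
    """Raised when a token sequence cannot be parsed."""


class token:
    def __init__(self, type, value):
        self.type = type
        self.value = value

    def __eq__(self, other):
        # Equal to another token when type and value match; equal to a plain
        # string when the value matches, which is what checks such as
        # tokens[i] in ['+', '-'] rely on.
        if isinstance(other, token):
            return self.type == other.type and self.value == other.value
        return self.value == other

    def __repr__(self):
        return "token(%r, %r)" % (self.type, self.value)


class tree:
    # A binary expression node; parsePower treats its type as 'node'.
    type = 'node'

    def __init__(self, value, left, right):
        self.value = value
        self.left = left
        self.right = right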
Example #2
 def parseAdditive(self, tokens):
     # Start at 1 so a leading unary '+'/'-' is not treated as a binary operator.
     i = 1
     while i < len(tokens):
         # Token equality also matches bare operator strings such as '+'.
         if tokens[i] in ['+', '-']:
             return tree(tokens[i].value, self.parseMultiplicative(tokens[:i]), self.parseAdditive(tokens[i+1:]))
         i += 1
     # A bare sign followed by a single operand: fold the sign into its value.
     if len(tokens) == 2 and tokens[0] in ['+', '-']:
         return token(tokens[1].type, "".join([tokens[0].value, tokens[1].value]))
     return self.parseMultiplicative(tokens)
Example #3
 def parseParens(self, tokens):
     # Collapse every outermost '(...)' group into the subtree it parses to,
     # then hand the flattened token list to parseAdditive.
     i = 0
     while i < len(tokens):
         if tokens[i] == token('operator', '('):
             openCount = 1
             start = i
             i += 1
             while i < len(tokens) and openCount:
                 if tokens[i] == token('operator', '('):
                     openCount += 1
                 if tokens[i] == token('operator', ')'):
                     openCount -= 1
                 if openCount == 0:
                     # Splice the parsed subexpression back in place of '(...)'.
                     tokens[start:i+1] = [self.parseParens(tokens[start+1:i])]
                     i = start
                 i += 1
             if openCount != 0:
                 raise ExpressionError("Parens problem")
         i += 1
     return self.parseAdditive(tokens)
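
The in-place splice in parseParens relies on Python list slice assignment; here is a tiny standalone illustration of that mechanism, with plain strings standing in for tokens:

toks = ['(', 'x', '+', 'y', ')', '*', '2']
# Replace the whole '( x + y )' span (indices 0..4) with a single placeholder,
# just as parseParens replaces it with the parsed subtree.
toks[0:5] = ['<subtree x+y>']
print(toks)   # ['<subtree x+y>', '*', '2']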
Example #4
import sys

def main():

    # Check for two command-line arguments (action & name); argv[0] is the script name.
    if len(sys.argv) == 3:
        # Check action type
        if sys.argv[1].lower() == "searchable":
            # Create file step1.txt
            file = open("step1.txt", "w")
            cwd = "/home"
            search(sys.argv[2], cwd, file)
            file.close()
        elif sys.argv[1].lower() == "token":
            token("step1.txt")
        elif sys.argv[1].lower() == "stopword":
            stopword("step2.txt")
        elif sys.argv[1].lower() == "stem":
            stem("step3.txt")
        else:
            print("Unrecognized action; this is where other commands would be handled.")
    else:
        print("You need an action and a name on the command line. Terminating.")
Example #5
 def parsePower(self, tokens):
     # Split on the first '^'; both operands are parsed as function expressions.
     i = 0
     while i < len(tokens):
         if tokens[i] == token('operator', '^'):
             return tree(tokens[i].value, self.parseFunctions(tokens[:i]), self.parseFunctions(tokens[i+1:]))
         i += 1
     if len(tokens) > 1:
         return self.parseFunctions(tokens)
     if not len(tokens):
         raise ExpressionError("Multiplication is not done this way")
     # A single remaining token must be an operand: identifier, number or subtree node.
     if tokens[0].type not in ['identifier', 'number', 'node']:
         raise ExpressionError("Operator and operator? Not good")
     return tokens[0]
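
Taken together, these snippets sketch a recursive-descent precedence chain: parseAssignment hands off to parseParens, parseParens to parseAdditive, parseAdditive splits on parseMultiplicative, and parsePower splits on parseFunctions (neither parseMultiplicative nor parseFunctions appears on this page). A hypothetical driver, assuming the methods live on a Parser class and reusing the Tokenizer exercised in the test snippets below:

t = Tokenizer()   # the Tokenizer exercised by the assert-based examples below
p = Parser()      # hypothetical class holding the parse* methods shown above
ast = p.parseAssignment(t.tokenize("y=(x+2)*3"))
# ast.value is '=', ast.left is the 'y' identifier token, and ast.right is the
# subtree built for '(x+2)*3'.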
Example #6
import tokenizer

__author__ = 'Roland'

t = tokenizer.Tokenizer()
t.tokenize("1+2")
assert (t.tokenize("1+2") == [
    tokenizer.token('number', '1'),
    tokenizer.token('operator', '+'),
    tokenizer.token('number', '2')
])
# Tokens also compare equal to their bare value strings.
assert (t.tokenize("1+2") == ['1', "+", '2'])
assert (t.tokenize("(x+y)*2/4") == [
    tokenizer.token('operator', '('),
    tokenizer.token('identifier', 'x'),
    tokenizer.token('operator', '+'),
    tokenizer.token('identifier', 'y'),
    tokenizer.token('operator', ')'),
    tokenizer.token('operator', '*'),
    tokenizer.token('number', '2'),
    tokenizer.token('operator', '/'),
    tokenizer.token('number', '4')
])
assert (t.tokenize("(x+(y*z+2))-3*((5+x)/2-4)") == [
    tokenizer.token('operator', '('),
    tokenizer.token('identifier', 'x'),
    tokenizer.token('operator', '+'),
    tokenizer.token('operator', '('),
    tokenizer.token('identifier', 'y'),
    tokenizer.token('operator', '*'),
    tokenizer.token('identifier', 'z'),
Example #7
import tokenizer

__author__ = 'Levon'

t = tokenizer.Tokenizer()
t.tokenize("1+2")
assert (t.tokenize("1+2") == [tokenizer.token('number','1'),tokenizer.token('operator','+'),tokenizer.token('number','2')])
assert (t.tokenize("1+2") == ['1',"+",'2'])
assert (t.tokenize("(x+y)*2/4") == [tokenizer.token('operator','('),tokenizer.token('identifier','x'),tokenizer.token('operator','+'),tokenizer.token('identifier','y'),
                                   tokenizer.token('operator',')'),tokenizer.token('operator','*'),tokenizer.token('number','2'),tokenizer.token('operator','/'),
                                   tokenizer.token('number','4')])
assert(t.tokenize("(x+(y*z+2))-3*((5+x)/2-4)") == [tokenizer.token('operator','('),
                                                   tokenizer.token('identifier','x'),
                                                   tokenizer.token('operator','+'),
                                                   tokenizer.token('operator','('),
                                                   tokenizer.token('identifier','y'),
                                                   tokenizer.token('operator','*'),
                                                   tokenizer.token('identifier','z'),
                                                   tokenizer.token('operator','+'),
                                                   tokenizer.token('number','2'),
                                                   tokenizer.token('operator',')'),
                                                   tokenizer.token('operator',')'),
                                                   tokenizer.token('operator','-'),
                                                   tokenizer.token('number','3'),
                                                   tokenizer.token('operator','*'),
                                                   tokenizer.token('operator','('),
                                                   tokenizer.token('operator','('),
                                                   tokenizer.token('number','5'),
                                                   tokenizer.token('operator','+'),
                                                   tokenizer.token('identifier','x'),
                                                   tokenizer.token('operator',')'),
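
The tokenizer module exercised by these asserts is not shown on this page. Below is a minimal regex-based sketch that would satisfy the asserts above, reusing the token helper sketched after Example #1; both are assumptions about the original implementation, not the project's actual code.

import re

class Tokenizer:
    # Integer literals, identifiers, or single-character operators; this sketch
    # skips whitespace between tokens and handles neither decimals nor
    # multi-character operators.
    _pattern = re.compile(r"\d+|[A-Za-z_]\w*|[()+\-*/^=]")

    def tokenize(self, text):
        out = []
        for match in self._pattern.finditer(text):
            value = match.group()
            if value[0].isdigit():
                out.append(token('number', value))
            elif value[0].isalpha() or value[0] == '_':
                out.append(token('identifier', value))
            else:
                out.append(token('operator', value))
        return out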