コード例 #1
0
ファイル: cell.py プロジェクト: tactycHQ/Saturn
    def formula(self, excel_formula):
        '''
        Setter for the cell's Excel formula.

        Assigning a formula immediately builds the RPN token stream for the
        cell and, for real formulas, records its precedent cells.
        @param excel_formula: excel formula as a string
        @return: rpn formula
        '''
        self._formula = excel_formula
        logging.debug("Processing RPN for formula {} at cell {}".format(
            excel_formula, self))

        starts_like_formula = str(excel_formula).startswith(('=', '+'))
        if starts_like_formula:
            # Proper formula: build the RPN and the precedent links.
            self.rpn = self.make_rpn(excel_formula)
            self.createPrec()
        else:
            # No leading '=' or '+': treat the content as a hardcoded value.
            logging.debug(
                "Formula does not start with = or +. Creating a hardcode cell")
            is_text = isinstance(fast_real(self.address), str)
            subtype = "TEXT" if is_text else "NUMBER"
            hardcode = Token(self.address, Token.OPERAND, subtype)
            self.rpn.append(OperandNode(hardcode))
            if is_text:
                # Text hardcodes never need recalculation.
                self.needs_calc = False

        logging.info("RPN is: {}".format(self.rpn))
コード例 #2
0
 def parse_return_statement(self):
     """Parse 'return expr? ;' into a 'returnStatement' node."""
     node = Token('returnStatement', [])
     self.try_add(node, 'keyword', value='return')
     # A bare 'return;' carries no expression before the semicolon.
     has_expression = self.token.value != ';'
     if has_expression:
         node.append(self.parse_expression())
     self.try_add(node, 'symbol', value=';')
     return node
コード例 #3
0
 def test_middle_nonapha(self):
     """Digit runs glued to letters must split into separate digit tokens."""
     source = 'я иду в кино00000 111 00000cinema'
     tokens = list(self.t.tokenize_gen_alpha_digit(source))
     self.assertEqual(len(tokens), 8)
     expected_digits = [Token('00000', 'digit', 12, 17),
                        Token('111', 'digit', 18, 21)]
     self.assertEqual(tokens[4:6], expected_digits)
コード例 #4
0
 def parse_do_statement(self):
     """Parse 'do subroutineCall ;' into a 'doStatement' node."""
     node = Token('doStatement', [])
     self.try_add(node, 'keyword', value='do')
     # Splice every token of the subroutine call into this node.
     call_tokens = self.parse_subroutine_call()
     for tok in call_tokens:
         node.append(tok)
     self.try_add(node, 'symbol', value=';')
     return node
コード例 #5
0
ファイル: parser_test.py プロジェクト: yortuc/lispip
def test_parse_simple_tokens():
    """A flat call expression parses into a name plus parameter tokens."""
    result = parse(tokenize('(add 1 2)'))

    expected_params = [Token('word', '1'), Token('word', '2')]
    assert result.name == 'add'
    assert result.params == expected_params
コード例 #6
0
 def is_statement(self):
     """Return True when the current token is a statement-starting keyword."""
     statement_keywords = ('let', 'if', 'while', 'do', 'return')
     return any(self.token == Token('keyword', kw)
                for kw in statement_keywords)
コード例 #7
0
    def test_split(self):
        """Whitespace-separated words map onto their character spans."""
        text = 'This is a test'
        tokenz = self._tokz.tokenize(text)

        expected_spans = [(0, 4), (5, 7), (8, 9), (10, 14)]
        for i, (start, stop) in enumerate(expected_spans):
            self.assertEqual(tokenz[i], Token(text, Span(start, stop)))
コード例 #8
0
    def test_stemmer_flex(self):
        """stem_flex yields the token plus its shortened stem variant."""
        line = "мамочка свари суп"

        source_token = Token(0, 8, "мамочка свари суп", "a")
        actual = list(Stemmer().stem_flex(source_token))
        expected = [Token(0, 8, line, 'a'), Token(0, 7, line, 'a')]

        self.assertEqual(actual, expected)
コード例 #9
0
 def test_exception(self):
     """Digits glued to letters ('3and3') must raise SyntaxError."""
     expr = "a > 3and3>5"
     tokenizer = Tokenizer(expr)
     valid_prefix = [
         Token(TokenType.FIELD, 'a'),
         Token(TokenType.COMPARISON_OPERATOR, '>'),
         Token(TokenType.INTEGER, 3),
     ]
     for expected in valid_prefix:
         self.assertEqual(expected, tokenizer.next_token())
     # The malformed '3and3' should blow up on the next token.
     self.assertRaises(SyntaxError, tokenizer.next_token)
コード例 #10
0
 def parse_var_declaration(self):
     """Parse 'var type name (, name)* ;' into a 'varDec' node."""
     node = Token('varDec', [])
     self.try_add(node, 'keyword', value='var')
     node.append(self.parse_type())
     self.try_add(node, 'identifier')
     # Additional comma-separated names share the declared type.
     while True:
         if self.token.value != ',':
             break
         self.try_add(node, 'symbol', value=',')
         self.try_add(node, 'identifier')
     self.try_add(node, 'symbol', value=';')
     return node
コード例 #11
0
ファイル: parser_test.py プロジェクト: yortuc/lispip
def test_parse_list():
    """A quoted list parses into a 'list' node holding its item tokens."""
    tokens = tokenize("'(1 2 3)")
    result = parse(tokens)
    expected_list_items = [
        Token('word', '1'),
        Token('word', '2'),
        Token('word', '3')
    ]
    assert result.token_type == 'list'
    # zip() stops at the shorter sequence, so a short result would pass
    # silently — pin the item count explicitly.
    assert len(result.val) == len(expected_list_items)
    assert all(a == b for a, b in zip(result.val, expected_list_items))
コード例 #12
0
 def test_tokenize_basic01(self):
     """'(+ 5 2)' tokenizes to parens, plus, and two integer tokens."""
     expected = [
         Token(TokenType.LPAREN, None),
         Token(TokenType.PLUS, None),
         Token(TokenType.INTEGER, 5),
         Token(TokenType.INTEGER, 2),
         Token(TokenType.RPAREN, None),
     ]
     self.assertListEqual(tokenize("(+ 5 2)"), expected)
コード例 #13
0
 def test_tokenize_eq(self):
     """'(eq? 2 2)' tokenizes with an EQ operator token."""
     result = tokenize("(eq? 2 2)")
     expected = [
         Token(TokenType.LPAREN, None),
         Token(TokenType.EQ, None),
         Token(TokenType.INTEGER, 2),
         Token(TokenType.INTEGER, 2),
         Token(TokenType.RPAREN, None)
     ]
     # assertListEqual matches the sibling tests and gives
     # element-level diffs on failure.
     self.assertListEqual(result, expected)
コード例 #14
0
 def test_tokenize_define(self):
     """'(define cat 5)' tokenizes with DEFINE, an ID, and an integer."""
     result = tokenize("(define cat 5)")
     expected = [
         Token(TokenType.LPAREN, None),
         Token(TokenType.DEFINE, None),
         Token(TokenType.ID, 'cat'),
         Token(TokenType.INTEGER, 5),
         Token(TokenType.RPAREN, None)
     ]
     # assertListEqual matches the sibling tests and gives
     # element-level diffs on failure.
     self.assertListEqual(result, expected)
コード例 #15
0
 def test_tokenize_basic02(self):
     """'(* 3 4)' tokenizes to parens, multiply, and two integers."""
     expected = [
         Token(TokenType.LPAREN, None),
         Token(TokenType.MULTIPLY, None),
         Token(TokenType.INTEGER, 3),
         Token(TokenType.INTEGER, 4),
         Token(TokenType.RPAREN, None),
     ]
     self.assertListEqual(tokenize("(* 3 4)"), expected)
コード例 #16
0
 def test_single_number(self):
     """A single number token yields a 'number' leaf in the tree."""
     token = Token()
     token.type = "number"
     token.value = "123.456"
     self.parser.set_tokens([token])
     tree = self.parser.get_expression_tree()
     assert tree is not None
     assert tree["node_type"] == "number"
     assert tree["value"] == 123.456
コード例 #17
0
    def test_specials(self):
        """Special characters ('Dr.', '!') must tokenize to these spans."""
        text = 'This Dr. is a test!'
        #       0123456789012345678
        tokenz = self._tokz.tokenize(text)

        spans = [(0, 4), (5, 8), (9, 11), (12, 13), (14, 19)]
        expected = [Token(text, Span(start, stop)) for start, stop in spans]
        self.assertEqual(tokenz, expected)
コード例 #18
0
 def test_single_identifier(self):
     """A single identifier token yields an 'ident' leaf with its name."""
     token = Token()
     token.type = "ident"
     token.name = "VP1"
     self.parser.set_tokens([token])
     tree = self.parser.get_expression_tree()
     assert tree is not None
     assert tree["node_type"] == "ident"
     assert tree["name"] == "VP1"
コード例 #19
0
 def addToken(self, token: Token, doc_id: int):
     """Record token's position under doc_id, creating the posting if absent."""
     # Reuse the existing posting for this document when there is one.
     posting = next((p for p in self._list if p.getDocId() == doc_id), None)
     if posting is None:
         posting = Posting(doc_id)
         self._list.append(posting)
     posting.addPosition(token.getPosition())
     self._save()
コード例 #20
0
ファイル: tests.py プロジェクト: MattSegal/nand2tetris
    def test_parse_parameter_list_empty(self):
        """
        ( (type identifier) (',' type identifier)*)?
        """
        tokens = (Token('symbol',')'), )
        parser = Parser(tokens)
        parser.parse_type = self._mock_parse(parser)

        actual = parser.parse_parameter_list()
        # An immediate ')' must yield an empty parameterList node.
        self.assertEqual(Token('parameterList', []), actual)
コード例 #21
0
ファイル: tokenizer_test.py プロジェクト: yortuc/lispip
def test_tokenize_list():
    """A quoted list tokenizes to list_open, the words, then list_close."""
    result = list(tokenize("'(1 2 3)"))

    expected = [
        Token('list_open'),
        Token('word', '1'),
        Token('word', '2'),
        Token('word', '3'),
        Token('list_close')
    ]

    # zip() stops at the shorter sequence, so a truncated token stream
    # would pass silently — pin the token count explicitly.
    assert len(result) == len(expected)
    assert all(a == b for a, b in zip(result, expected))
コード例 #22
0
ファイル: parser_test.py プロジェクト: yortuc/lispip
def test_parse_cascaded_parans():
    """Nested calls parse into nested Func nodes inside the outer params."""
    tokens = tokenize('(add (mul 3 4) 5)')
    result = parse(tokens)

    expected_params = [
        Func('mul',
             [Token('word', '3'), Token('word', '4')]),
        Token('word', '5')
    ]

    assert result.name == 'add'
    # zip() truncates silently, so verify the parameter count as well.
    assert len(result.params) == len(expected_params)
    assert all(a == b for a, b in zip(result.params, expected_params))
コード例 #23
0
ファイル: compiler.py プロジェクト: devchas/nandCompiler
	def advance(self):
		"""Step the token cursor forward one token, refreshing the lookahead.

		Updates curTok/curOut/curType and, when another token remains,
		nextTok/nextOut/nextType. Does nothing once the last token is current.
		"""
		if (self.tokCnt + 1) < len(self.tokens):
			self.tokCnt += 1
			self.nextCnt = self.tokCnt + 1
			self.curTok = Token(self.tokens[self.tokCnt])
			self.curOut = self.curTok.tknOut()
			self.curType = self.curTok.tokenType()
			# The lookahead slot is refreshed only while a token exists for it.
			if self.nextCnt < len(self.tokens):
				self.nextTok = Token(self.tokens[self.nextCnt])
				self.nextOut = self.nextTok.tknOut()
				self.nextType = self.nextTok.tokenType()
		return
コード例 #24
0
    def process(tokenlist):
        """Splice GOTO tokens that implement 'if'/'else' chains and 'while' loops.

        Scans the flat token stream; for each call to the 'if' or 'while'
        builtin it validates that a block follows and inserts the GOTO tokens
        the interpreter needs, shifting existing goto targets as the stream
        grows.
        """
        i = 0
        while i < len(tokenlist.tokens):
            token = tokenlist.tokens[i]
            t = token.type

            if t == TYPE_CALL:
                token2 = token.value[1]
                t2 = token2.type
                if t2 == TYPE_FUNCTION:
                    v2 = token2.value
                    if v2 == "if":
                        # An 'if' call must be immediately followed by a block start.
                        if i+1 < len(tokenlist.tokens) and tokenlist.tokens[i+1].type == TYPE_BLOCK_START and \
                         tokenlist.tokens[i + 1].value == BLOCK_START_CHAR:
                            pass
                        else:
                            error_format(
                                token, "\"if\" should be followed by a block.")

                        # Only an if-body followed by 'else' needs a GOTO that
                        # jumps past the rest of the chain.
                        index = find_endblock_token_index(
                            tokenlist.tokens, i + 2)
                        if index + 1 < len(tokenlist.tokens) and tokenlist.tokens[index + 1].type == TYPE_TERM and \
                         tokenlist.tokens[index + 1].value == "else":
                            end_of_chain = find_endblock_token_index(
                                tokenlist.tokens, i)
                            tokenlist.tokens.insert(
                                index,
                                Token(TYPE_GOTO, end_of_chain, None, None))
                            # The insertion shifted everything after `index`;
                            # fix up gotos that pointed past it.
                            increment_gotos_pointing_after_here(
                                tokenlist, index)
                    elif v2 == "while":
                        # A 'while' call must be immediately followed by a block start.
                        if i + 1 < len(tokenlist.tokens) and tokenlist.tokens[i + 1].type == TYPE_BLOCK_START and \
                            tokenlist.tokens[i + 1].value == BLOCK_START_CHAR:
                            pass
                        else:
                            error_format(
                                token,
                                "\"while\" should be followed by a body.")

                        # Place a GOTO at the end of the body pointing back to
                        # the while-function's arguments, forming the loop.
                        index = find_endblock_token_index(
                            tokenlist.tokens, i + 1)
                        goto = find_startblock_token_index(
                            tokenlist.tokens, i - 3)
                        tokenlist.tokens.insert(
                            index - 1, Token(TYPE_GOTO, goto, None, None))
                        increment_gotos_pointing_after_here(tokenlist, index)
            i += 1
コード例 #25
0
ファイル: tokenizer_test.py プロジェクト: yortuc/lispip
def test_tokenize_with_spaces():
    """Whitespace and newlines between tokens must not affect the stream."""
    result = list(tokenize('''(mul 
                            2 
                            3
                         )'''))
    expected = [
        Token('open'),
        Token('word', 'mul'),
        Token('word', '2'),
        Token('word', '3'),
        Token('close')
    ]

    # zip() stops at the shorter sequence, so a truncated token stream
    # would pass silently — pin the token count explicitly.
    assert len(result) == len(expected)
    assert all(a == b for a, b in zip(result, expected))
コード例 #26
0
ファイル: asts.py プロジェクト: raghav198/sapphire-python
    def execute(self, scope):
        """Evaluate the assignment's value and bind it in scope.

        Returns the (unevaluated) value AST node.
        """
        result = self.val.execute(scope)
        dest_name = self.dest.value
        if type(result) is AtomAST:
            # Already an atom: store the value node itself.
            scope[dest_name] = self.val
        elif type(result) is int:
            scope[dest_name] = AtomAST(Token(TokenType.NUM, result))
        elif type(result) is str:
            scope[dest_name] = AtomAST(Token(TokenType.STR, result))
        else:
            # Unsupported types fall back to a STR atom, with a warning.
            scope[dest_name] = AtomAST(Token(TokenType.STR, result))
            print('Warning: value of type {} is not supported!'.format(type(result)))

        return self.val
コード例 #27
0
 def compile_do(self, parse_tree):
     """Compile a doStatement: evaluate the call, then discard its result."""
     log.info('Compiling do statement')
     assert parse_tree.type == 'doStatement'
     # Strip the leading 'do' and trailing ';' to leave the bare call term.
     subroutine_term = Token('term', parse_tree.value[1:-1])
     vm_code = self.compile_expression(subroutine_term)
     return vm_code + 'pop temp 0\n'  # Clear return value
コード例 #28
0
 def parse_subroutine_call(self):
     """
     identifier '(' expressionList ')' | 
     identifier '.' identifier '(' expressionList ')'
     """
     node = Token('subroutineCall', [])
     self.try_add(node, 'identifier')
     # Qualified form: Class.method or object.method
     if self.token.value == '.':
         self.try_add(node, 'symbol', '.')
         self.try_add(node, 'identifier')
     self.try_add(node, 'symbol', '(')
     node.append(self.parse_expression_list())
     self.try_add(node, 'symbol', ')')
     # The wrapper node is not used in the XML output; yield its children.
     yield from node.value
コード例 #29
0
 def _statement_list(self, additional_syncset=frozenset()):
     """Parse a sequence of statements into one statement-list Atom.

     Keeps consuming while the current lexeme starts a statement (or looks
     like an identifier/number), dispatching each statement kind to its
     parser and attaching the result as a child of the list node.
     """
     self._check_for_starter(self.stmt_starter_label,
                             self._follow_dl | additional_syncset, self._ID)
     sl = Atom(Token(self.sl_label, self.list_category,
                     self._curr.location))
     # Inside a loop ('rompe' in the syncset) the first-set also admits 'rompe'.
     first_set = self._first_loop_sl if 'rompe' in additional_syncset else self._first_sl
     while self._curr.lexeme in first_set or self._check_id_num(self._ID):
         if self._curr.lexeme == 'if':
             self._selection(additional_syncset).parent = sl
         elif self._curr.lexeme == 'while':
             self._iteration(additional_syncset).parent = sl
         elif self._curr.lexeme == 'repeat':
             self._repetition(additional_syncset).parent = sl
         elif self._curr.lexeme == 'cin':
             self._cin_stmt(additional_syncset).parent = sl
         elif self._curr.lexeme in ('cout', 'coutln'):
             self._cout_stmt(additional_syncset).parent = sl
         elif self._curr.lexeme == '{':
             self._block(additional_syncset).parent = sl
         elif self._curr.lexeme == 'rompe':
             self._break_stmt(additional_syncset).parent = sl
         elif self._check_id_num(self._ID):
             self._assignment(additional_syncset).parent = sl
         elif self._curr.lexeme in self._first_una:
             self._pre(additional_syncset).parent = sl
         # Re-sync before testing the loop condition again.
         self._check_for_starter(self.stmt_starter_label,
                                 self._follow_dl | additional_syncset,
                                 self._ID)
     return sl
コード例 #30
0
ファイル: ASTParser.py プロジェクト: RojerGS/Roj
 def get_control(self, token_list):
     """Try to parse a control statement (stop/return/jumpover/halt).

     Returns (Control node, remaining tokens), or (None, copy of the
     original tokens) when the first token is not a control keyword.
     """
     control_types = (Token.STOP, Token.RETURN, Token.JUMPOVER, Token.HALT)
     head = token_list[0]
     if head.get_type() not in control_types:
         # Hand back an untouched copy so the caller can keep parsing.
         return None, token_list[:]

     rest = token_list[1:]
     if head.get_type() in (Token.STOP, Token.JUMPOVER):
         # stop/jumpover never carry a value.
         sub = Literal(Token(Token.NULL, "Null"))
     else:
         sub, rest = self.get_expression(rest)
         if sub is None:
             sub = Literal(Token(Token.NULL, "Null"))

     return Control(head, parent=None, child=sub), rest
コード例 #31
0
    def parse_class_var_declaration(self):
        """
        ('static' | 'field' ) type varName (',' varName)* ';' 
        """
        declaration = Token('classVarDec', [])
        self.try_add(declaration, 'keyword', value='static^field')
        declaration.append(self.parse_type())
        self.try_add(declaration, 'identifier')
        # Extra comma-separated names share the declared kind and type.
        while self.token.value == ',':
            self.try_add(declaration, 'symbol', value=',')
            self.try_add(declaration, 'identifier')
        self.try_add(declaration, 'symbol', value=';')
        return declaration
コード例 #32
0
def parse(tokens):
    """Build Func/list nodes from a flat token stream using a stack."""

    def collect_until(stack, opener):
        # Pop items until the matching opener token; return them in source order.
        items = []
        while True:
            top = stack.pop()
            if top.token_type == opener:
                return items
            items.insert(0, top)

    stack = deque()
    for tok in tokens:
        kind = tok.token_type
        if kind in ('open', 'word', 'list_open'):
            stack.append(tok)
        elif kind == 'close':
            # Everything since '(' forms a call: first item is the name.
            items = collect_until(stack, 'open')
            stack.append(Func(items[0].val, items[1:]))
        elif kind == 'list_close':
            stack.append(Token('list', collect_until(stack, 'list_open')))

    # A single node means a fully-reduced expression; otherwise return all.
    if len(stack) == 1:
        return stack.pop()
    return list(stack)
コード例 #33
0
 def _pre(self, additional_syncset):
     """Parse a prefix increment/decrement into an assignment Atom.

     Desugars the operation into 'x = x <op> 1': the returned node is the
     assignment with the operator node (carrying the variable and a
     literal 1) as its child.
     """
     operator = Atom(self._curr, self._curr.lexeme[0])
     self._get_token()
     # Placeholder target ('Ø'); replaced once the identifier is confirmed.
     var = Atom(self._curr, f'{self.assignment_label} Ø')
     temp = self._curr.lexeme
     if self._sync(self._ID,
                   frozenset({';'}) | self._follow_dl | additional_syncset,
                   self._ID):
         var.lexeme = f'{self.assignment_label} {temp}'
     operator.parent = var
     # Left operand: the variable being incremented/decremented.
     Atom(Token(var.lexeme.split()[-1], var.category, operator.location),
          parent=operator,
          _inc_dec=True)
     # Right operand: the implicit literal 1.
     Atom(Token('1', self.tokenizer.int_label, operator.location),
          parent=operator)
     self._sync(';', self._follow_dl | additional_syncset, self._ID)
     return var
コード例 #34
0
ファイル: compiler.py プロジェクト: devchas/nandCompiler
	def __init__(self, file):
		"""Read and tokenize `file`, then prime the current/next token lookahead."""
		self.file = file
		fileTxt = fileIn(self.file).fileText()
		self.tokens = FlProc(fileTxt).tokenize()
		self.t = []  # accumulated XML output lines
		self.vm = []  # accumulated VM output lines
		self.tokCnt = 0
		self.nextCnt = self.tokCnt + 1
		self.curTok = Token(self.tokens[self.tokCnt])
		self.nextTok = Token(self.tokens[self.nextCnt])
		self.curOut = self.curTok.tknOut()
		self.nextOut = self.nextTok.tknOut()
		self.curType = self.curTok.tokenType()
		self.nextType = self.nextTok.tokenType()
		self.subTbl = None  # subroutine-level symbol table (set per subroutine)
		self.exprCnt = 0  # expressions seen in the current expression list
		self.wLoop = 0  # running label counters for while/if control flow
		self.wEnd = 0
		self.ifYes = 0
		self.ifNo = 0
コード例 #35
0
ファイル: compiler.py プロジェクト: devchas/nandCompiler
class Compiler(object):

	classVarDec = ['static', 'field']
	varType = ['int', 'char', 'boolean']
	subroutineDec = ['constructor', 'function', 'method']
	statements = ['let', 'if', 'while', 'do', 'return']
	ops = ['+', '-', '*', '/', '&', '|', '<', '>', '=']
	urnaryOps = ['-', '~']
	keyConsts = ['true', 'false', 'null', 'this']
	
	def __init__(self, file):
		"""Read and tokenize `file`, then prime the current/next token lookahead."""
		self.file = file
		fileTxt = fileIn(self.file).fileText()
		self.tokens = FlProc(fileTxt).tokenize()
		self.t = []  # accumulated XML output lines
		self.vm = []  # accumulated VM output lines
		self.tokCnt = 0
		self.nextCnt = self.tokCnt + 1
		self.curTok = Token(self.tokens[self.tokCnt])
		self.nextTok = Token(self.tokens[self.nextCnt])
		self.curOut = self.curTok.tknOut()
		self.nextOut = self.nextTok.tknOut()
		self.curType = self.curTok.tokenType()
		self.nextType = self.nextTok.tokenType()
		self.subTbl = None  # subroutine-level symbol table (set per subroutine)
		self.exprCnt = 0  # expressions seen in the current expression list
		self.wLoop = 0  # running label counters for while/if control flow
		self.wEnd = 0
		self.ifYes = 0
		self.ifNo = 0
		
	# append text
	def appTxt(self, text, adv=0):
		#print(text)
		self.t.append(text)
		self.addAdv(adv)
		return

	# append text for each token until token == string then addAdv 'adv' times
	def appUntil(self, string, adv=0):
		"""Emit XML for tokens until the current token equals `string`, then advance `adv` more."""
		while (self.curOut != string):
			self.addAdv()
		self.addAdv(adv)
		return

	# advance to next token and update all relevant variables
	def advance(self):
		"""Step the token cursor forward one token, refreshing the lookahead.

		Updates curTok/curOut/curType and, when another token remains,
		nextTok/nextOut/nextType. Does nothing once the last token is current.
		"""
		if (self.tokCnt + 1) < len(self.tokens):
			self.tokCnt += 1
			self.nextCnt = self.tokCnt + 1
			self.curTok = Token(self.tokens[self.tokCnt])
			self.curOut = self.curTok.tknOut()
			self.curType = self.curTok.tokenType()
			# The lookahead slot is refreshed only while a token exists for it.
			if self.nextCnt < len(self.tokens):
				self.nextTok = Token(self.tokens[self.nextCnt])
				self.nextOut = self.nextTok.tknOut()
				self.nextType = self.nextTok.tokenType()
		return

	# add xml format and advance
	def addAdv(self, num=1):
		if num > 0:
			for _ in range(num):
				self.appTxt(self.curTok.xmlFrmt())
				self.advance()
		return
		
	# starts compilation process
	def constructor(self):
		if self.curOut == 'class':
			self.compileClass()
		return
			
	# append tokens until token = '{' (inclusive), compile any vars or subroutines
	def compileClass(self):
		"""Compile one 'class' construct: vars, subroutines, then write output files."""
		self.tbl = Table()  # class-level symbol table
		self.appTxt('<class>')
		self.clName = self.nextOut  # the class-name token follows 'class'
		self.appUntil('{', 1)
		self.compileClassVarDec()
		self.compileSubroutine()
		self.appUntil('}', 1)
		self.appTxt('</class>')
		self.makeFile()
		print(self.tbl.hash)
		return
		
	# for each set of var declarations, add xml tags
	def compileClassVarDec(self):
		if self.curOut in Compiler.classVarDec:
			self.appTxt('<classVarDec>')
			self.addSymbolsToTable(self.getKindType(), self.tbl)
			self.addAdv()	# passes over semi-colon at end of var declaration
			self.appTxt('</classVarDec>')
			self.compileClassVarDec()
		return

	# gets the kind and type for at least one symbol of the same type (list separated by commas)
	def getKindType(self):
		"""Consume two tokens (kind, type) and return them as a list."""
		fields = []
		for _ in range(2):
			fields.append(self.curOut)
			self.addAdv()
		return fields

	# adds a new line to current symbol table if multiple of same kind as designated by comma
	def addSymbolsToTable(self, fields, table):
		"""Register curOut under (kind, type) in `table`; recurse across comma lists."""
		table.appSymbol([fields[0], fields[1], self.curOut])
		self.addAdv()
		if self.curOut == ',':
			self.addAdv()
			self.addSymbolsToTable(fields, table)

	def compileSubroutine(self):
		"""Compile each constructor/function/method declaration in turn."""
		if self.curOut in Compiler.subroutineDec:
			self.appTxt('<subroutineDec>')
			self.subTbl = Table()  # fresh symbol table per subroutine
			fncInfo = self.getFncInfo()
			self.appUntil('(', 1)
			self.appTxt('<parameterList>')
			args = self.compileParameterList()
			self.appTxt('</parameterList>', 1)	#	closing paren
			self.writeFnc(fncInfo, args)
			self.compileSubBody()
			self.appTxt('</subroutineDec>')
			print(self.subTbl.hash)
			# Recurse to pick up the next subroutine, if any.
			self.compileSubroutine()
		return

	def writeFnc(self, info, args):
		if info['kind'] == 'method':
			args += 1
		fnc = 'function ' + self.clName + '.' + info['name'] + ' ' + str(args)
		self.vm.append(fnc)
		# sets pointer reference based on 'hidden' first argument for methods
		if info['kind'] == 'method':
			self.vm.append('push argument 0')
			self.vm.append('pop pointer 0')
		# constructor - calls memory.alloc method (1 argument for size); set pointer address
		elif info['kind'] == 'constructor':
			self.vm.append('push constant ' + str(self.calcFieldVars()))
			self.vm.append('call Memory.alloc 1')
			self.vm.append('pop pointer 0')
		return

	# returns dict of fnc kind, type and name
	def getFncInfo(self):
		"""Return the subroutine's kind, return type and name as a dict."""
		info = {'kind': self.curOut}
		self.addAdv()
		info['type'] = self.curOut
		info['name'] = self.nextOut
		return info

	def compileParameterList(self, paramCnt=0):
		"""Recursively consume '(type name, ...)', registering each parameter
		as an 'argument' symbol; returns the number of parameters seen."""
		params = paramCnt
		if self.curOut != ')':
			params += 1
			pFields = []
			# Consume the two tokens of one parameter: its type and its name.
			for i in range(0, 2):
				pFields.append(self.curOut)
				self.addAdv()
			pFields = ['argument', pFields[0], pFields[1]]
			self.subTbl.appSymbol(pFields)
			if self.curOut == ',':
				self.addAdv()
			params = self.compileParameterList(params)
		return params

	def compileSubBody(self):
		"""Compile '{ varDec* statements }' for the current subroutine."""
		self.appTxt('<subroutineBody>', 1)	#	open curly brace
		self.compileVarDec()
		self.appTxt('<statements>')
		self.compileStatements()
		self.appTxt('</statements>', 1)
		self.appTxt('</subroutineBody>')
		return

	def compileVarDec(self):
		if self.curOut == 'var':
			self.appTxt('<varDec>')
			self.addSymbolsToTable(self.getKindType(), self.subTbl)
			self.addAdv()
			self.appTxt('</varDec>')
			self.compileVarDec()
		return

	def compileStatements(self):
		if self.curOut in Compiler.statements:
			if self.curOut == 'do':
				self.compileDo()
			elif self.curOut == 'let':
				 self.compileLet()
			elif self.curOut == 'return':
				self.compileReturn()
			elif self.curOut == 'while':
				self.compileWhile()
			elif self.curOut == 'if':
				self.compileIf()
			self.compileStatements()
		return


	def compileDo(self):
		"""Compile a 'do subroutineCall;' statement, discarding its return value."""
		self.appTxt('<doStatement>')
		# advances over 'do' keyword
		self.addAdv()
		# creates function call; if method, "class." elif function "thisClass."
		if self.addClassToFncCall() == False:
			self.fncCall = 'call ' + self.clName + '.' + self.curOut
			# if function, advance over fnc name and open paren
			self.addAdv(2)
		else:
			# if method, advances over "."
			self.addAdv()
			# adds method name
			self.addMethodToFncCall()
			# advances over method name and open paren
			self.addAdv(2)
		self.compileExpressionList()
		self.callFnc()
		self.appUntil(';', 1)
		# pops and disregards output of called function
		self.vm.append('pop temp 0')
		self.appTxt('</doStatement>')
		return

	def compileLet(self):
		"""Compile 'let var([expr])? = expr;' including array-element targets."""
		arrIndex = None
		self.appTxt('<letStatement>')
		self.addAdv()	#	let statement
		# tuple of (0) kind, (1) index
		varFields = self.getVarFields(self.curOut)
		self.addAdv()	#	var name
		# advances over open bracket and resolves the index upon reaching array item
		if self.curOut == '[':
			self.addAdv()
			arrIndex = self.getVarFields(self.curOut)
			# self.compileExpression()
		# appUntil should still work with array var because needs to advance over closed bracket
		self.appUntil('=', 1)
		# clear value of pop expression (not sure this is necessary)
		self.fncCall = None

		self.compileExpression()

		self.callFnc()
		
		# end assignment pop
		# array pop
		if arrIndex:
			# NOTE(review): adds the base index to the element index and pops
			# 'that N' directly - confirm this matches the target VM's
			# pointer-1/that addressing model.
			thatRef = varFields[1] + arrIndex[1]
			self.vm.append('pop that ' + str(thatRef))
		# ordinary pop
		else:
			popStmnt = 'pop ' +  varFields[0] + ' ' + str(varFields[1])
			self.vm.append(popStmnt)
		
		self.appTxt('</letStatement>')
		return

	def compileReturn(self):
		"""Compile 'return expr?;' — a void return pushes constant 0."""
		self.appTxt('<returnStatement>', 1)
		if self.curOut != ';':
			self.compileExpression()
		else:
			# Void subroutines must still leave a value on the stack.
			self.vm.append('push constant 0')
			self.addAdv()
		self.appTxt('</returnStatement>')	
		return

	def compileWhile(self):
		"""Compile a while statement using wLoop/wEnd label counters.

		NOTE(review): the exit branch is 'if-goto wEndN' straight after the
		condition, which leaves the loop when the condition is TRUE — most
		Jack compilers negate the condition ('not') before the if-goto;
		confirm compileExpression emits that negation.
		"""
		self.appTxt('<whileStatement>')

		thiswLoop = self.wLoop
		self.vm.append('label wLoop' + str(thiswLoop))
		self.wLoop += 1

		self.appUntil('(', 1)
		self.compileExpression()
		
		thiswEnd = self.wEnd
		self.vm.append('if-goto wEnd' + str(thiswEnd))
		self.wEnd += 1
		self.newStatement()
		
		self.addAdv()
		self.vm.append('goto wLoop' + str(thiswLoop))
		self.appTxt('</whileStatement>')
		self.vm.append('label wEnd' + str(thiswEnd))
		return

	def compileIf(self):
		"""Compile if/else using ifYes/ifNo label counters."""
		self.appTxt('<ifStatement>')
		self.appUntil('(', 1)
		self.compileExpression()

		# True branch target.
		thisIfYes = self.ifYes
		self.vm.append('if-goto ifYes' + str(thisIfYes))
		self.ifYes += 1

		# Fall-through (condition false) jumps past the if-body.
		thisIfNo = self.ifNo
		self.vm.append('goto ifNo' + str(thisIfNo))
		self.ifNo += 1

		self.vm.append('label ifYes' + str(thisIfYes))

		self.newStatement()
		self.addAdv()	# advances over closed curly brace
		self.vm.append('label ifNo' + str(thisIfNo))
		if self.curOut == 'else':
			self.addAdv()
			self.newStatement()
			self.addAdv()	# advances over closed curly brace
		self.appTxt('</ifStatement>')
		return

	def newStatement(self):
		if self.curOut == '{':
			self.addAdv()
			self.appTxt('<statements>')
			self.compileStatements()
			self.appTxt('</statements>')
		return
		
	def compileExpression(self):
		"""Compile one expression, then step past list/array/terminator punctuation."""
		self.appTxt('<expression>')
		self.compileTerm()
		self.appTxt('</expression>')
		# advance and start a new expression upon reaching comma
		if self.curOut == ',':
			self.exprCnt += 1
			self.addAdv()
			self.compileExpression()
		# advances over closed bracket (end of array item)
		elif self.curOut == ']':
			self.addAdv()
		# advances 1 step unless the next token is the terminating semi-colon
		elif self.nextOut != ';':
			self.addAdv()
		return
		
	def compileTerm(self, oper=None):
		"""Compile one term; a pending operator is applied after the following
		term is compiled (post-order), matching VM stack evaluation order."""
		runOper = True
		# stops compiling terms upon reaching the following symbols
		if self.curOut not in [')', ';', ',', ']']:
			# operator advance (not a term): remember it, apply it later
			if self.curOut in Compiler.ops:
				oper = self.curOut
				runOper = False
				self.addAdv()
			else:
				self.appTxt('<term>')
				# open paren/bracket signifies beginning of new expression
				if self.curOut in ['(', '[']:
					self.addAdv()
					self.compileExpression()
				# advances over unary operator and compiles next term
				elif self.curOut in Compiler.urnaryOps:
					oper = self.curOut
					runOper = False
					self.addAdv()
					self.compileTerm(oper)
				elif self.curOut == '"':
						self.compileStr()
				else:
					# push expr if not object or array 
					if self.addClassToFncCall() == False and self.isArray() == False:
						self.writePush()
					elif self.isArray() == True:
						varFields = self.getVarFields(self.curOut)
						self.addAdv(2)
						arrIndex = self.getVarFields(self.curOut)
						thatRef = varFields[1] + arrIndex[1]
						self.vm.append('push that ' + str(thatRef))
						self.addAdv()
					self.addAdv()
					# '.' signifies an object/class call; compile its argument list
					if self.curOut == '.':
						self.addMethodToFncCall()
						self.appUntil('(', 1)
						self.compileExpressionList()
				self.appTxt('</term>')
			# Compile the right-hand term, then emit the deferred operator.
			self.compileTerm(oper)
			if oper and runOper == True:
				self.writeOper(oper)
		return

	def writeOper(self, oper):
		ops = {
			"+": "add",
			"-": "sub",
			"*": "call Math.multipy()",
			"/": "call Math.divide()",
			"&": "and",
			"|": "or",
			"<": "lt",
			">": "gt",
			"=": "eq",
			"~": "not"
		}
		op = ops[oper]
		self.vm.append(op)	
		return

	def isArray(self):
		if self.nextOut == '[':
			return True
		else:
			return False

	# fnc call = "class." (method added later)
	def addClassToFncCall(self):
		if self.nextOut == '.':
			# Pushes method's object onto stack as hidden first param
			# only if first letter lowercase (meaning is an object - not a class)
			if self.curOut[0].isupper() == False:
				self.writePush()
			self.fncCall = 'call ' + self.curOut + self.nextOut
			return True
		else:
			return False

	# adds method to fnc call so is "class.method"
	def addMethodToFncCall(self):
		self.fncCall = self.fncCall + self.nextOut
		return

	def callFnc(self):
		if self.fncCall:
			self.vm.append(self.fncCall)
			self.fncCall = None
			self.exprCnt = 0
		return

	# write push statement
	# currently only handles int constants and expressions - need to implement string constants
	def writePush(self):
		keyDict = {'true': 'constant 1', 'false': 'constant 0', 'null': 'constant 0', 'this': 'pointer 0'}
		pushExpr = ''
		if self.curType == 'integerConstant':
			pushExpr = 'push contstant ' + str(self.curOut)
		elif self.curOut in keyDict:
			pushExpr = 'push ' + keyDict[self.curOut]
		else:
			varFields = self.getVarFields(self.curOut)
			if varFields:
				pushExpr = 'push ' + varFields[0] + ' ' + str(varFields[1]) + ': ' + self.curOut
		self.vm.append(pushExpr)
		if self.curOut == 'true':
			self.vm.append('neg')
		return

	def writePop(self, key):
		"""Emit a VM pop into the segment/index where `key` is stored."""
		fields = self.getVarFields(key)
		self.vm.append('pop ' + fields[0] + ' ' + str(fields[1]))

	def compileExpressionList(self, adv=0):
		"""Compile a comma-separated expression list and append its count to fncCall."""
		self.appTxt('<expressionList>')
		if self.curOut != ')':
			# a non-empty list starts with its first expression
			self.exprCnt += 1
			self.compileExpression()
		self.appTxt('</expressionList>', 1)
		# add argument count to fnc call -> "class.method #args"
		self.fncCall = self.fncCall + ' ' + str(self.exprCnt)
		self.addAdv(adv)
		return

	# returns tuple of (0) kind and (1) index for given key
	# checks presence in prodecure-level table first then class-level table
	def getVarFields(self, key):
		if self.subTbl:
			if key in self.subTbl.hash:
				fields = self.subTbl.getIndexKind(key)
			else:
				fields = self.checkClassTable(key)
		else:
			fields = self.checkClassTable(key)
		return fields

	def checkClassTable(self, key):
		if key in self.tbl.hash:
			fields = self.tbl.getIndexKind(key)
		else:
			fields = None
		return fields

	# calculates number of field vars by iterating over table hash and counting number with kind = field
	# result will be pushed onto stack for constructor
	def calcFieldVars(self):
		fieldCnt = 0
		for key in self.tbl.hash:
			if self.tbl.hash[key]['kind'] == 'field':
				fieldCnt += 1
		return fieldCnt

	# tokenizer returns all words as single terms - creates string contstant type and string without quotes here
	def compileStr(self):
		"""Compile a double-quoted string constant into XML plus String VM calls."""
		self.advance()
		strCon = strTok().strAppend()
		self.curType = 'stringConstant' 
		newString = str(strCon)
		self.appTxt('<' + self.curType + '>' + newString + '</' + self.curType + '>')
		strLen = len(newString)
		# NOTE(review): 'call String.new(n)' / 'String.appendChar(c)' do not match
		# the usual 'call String.new 1' VM call syntax — confirm the target VM format.
		self.vm.append('call String.new(' + str(strLen) + ')')
		for i in newString:
			self.vm.append('String.appendChar(' + i + ')')
		self.advance()
		return

	def makeFile(self):
		"""Write the accumulated XML and VM output to disk."""
		#print(self.t)
		# for i in self.vm:
		# 	print(i)
		print(str(self.tokCnt + 1) + ' of ' + str(len(self.tokens)))
		# NOTE(review): fileNm is not defined anywhere in this view — presumably a
		# module-level global holding the output base name; confirm.
		fileOut(self.t, fileNm, 'xml').write()
		fileOut(self.vm, fileNm, 'vm').write()