def parse_asn_statement():
    """Parse an assignment statement introduced by YOU.

    Dispatches on the token after YOU: MAKE yields a RecipeASN,
    CAN hands off to the buy-statement parser.

    Raises:
        ParseError: if the token after YOU is neither MAKE nor CAN.
    """
    tokenizer = get_tokenizer()
    tokenizer.pop_and_check(YOU)
    next_token = tokenizer.top()
    if next_token == CAN:
        return parse_buy_statement()
    if next_token == MAKE:
        recipe = RecipeASN()
        recipe.parse()
        return recipe
    raise ParseError("Invalid token at line {0}.".format(tokenizer.get_line()))
def parse_list(end_token):
    """Parse a separator-delimited list of names, up to *end_token*.

    Consumes the terminating *end_token* from the stream.

    Returns:
        list of the non-empty names collected.
    """
    tokenizer = get_tokenizer()
    names = []
    while tokenizer.top() != end_token:
        name = parse_name([SEPARATOR, end_token])
        # Swallow the separator between entries, if present.
        if tokenizer.top() == SEPARATOR:
            tokenizer.pop()
        # Skip empty names (e.g. a stray separator).
        if name:
            names.append(name)
    tokenizer.pop_and_check(end_token)
    return names
def parse_name(end_tokens):
    """Parse a multi-word identifier, stopping before any token in *end_tokens*.

    The words are joined with single spaces. The terminating token is left
    in the stream for the caller.

    Raises:
        ParseError: if the token stream runs out before an end token is seen.
    """
    tokenizer = get_tokenizer()
    words = []
    while tokenizer.has_next() and tokenizer.top() not in end_tokens:
        words.append(tokenizer.pop())
    if not tokenizer.has_next():
        raise ParseError(
            "Expected one of the following tokens {0} at line {1}.".format(
                end_tokens, tokenizer.get_line()))
    return " ".join(words)
def parse_statement():
    """Parse one statement, dispatching on its leading token.

    NEW and YOU delegate to the dedicated statement parsers; CALL and
    PRINT construct the matching statement object and parse it in place.

    Raises:
        ParseError: if the leading token starts no known statement.
    """
    tokenizer = get_tokenizer()
    token = tokenizer.top()
    if token == NEW:
        return parse_dec_statement()
    if token == YOU:
        return parse_asn_statement()
    if token in (CALL, PRINT):
        statement = ProcCall() if token == CALL else PrintStatement()
        statement.parse()
        return statement
    raise ParseError("Invalid token {0} at line {1}.".format(
        token, tokenizer.get_line()))
def parse_dec_statement():
    """Parse a declaration statement introduced by NEW.

    The token after NEW selects which declaration class to instantiate
    and parse.

    Raises:
        ParseError: if the token after NEW names no known declaration.
    """
    tokenizer = get_tokenizer()
    tokenizer.pop_and_check(NEW)
    # Table mapping the declaration keyword to its statement class.
    constructors = {
        INGREDIENTS: IngredientsDec,
        STORES: StoresDec,
        MEASUREMENTS: MeasuresDec,
        RECIPES: RecipesDec,
    }
    constructor = constructors.get(tokenizer.top())
    if constructor is None:
        raise ParseError("Invalid token at line {0}.".format(
            tokenizer.get_line()))
    statement = constructor()
    statement.parse()
    return statement
def parse_buy_statement():
    """Parse a buy statement: CAN BUY <items...> AT <store> [FOR ...].

    An end-of-line after the store name yields a StoreASN; a FOR token
    yields a PartCostASN.

    Raises:
        ParseError: if neither END_OF_LINE nor FOR follows the store name.
    """
    tokenizer = get_tokenizer()
    tokenizer.pop_and_check(CAN)
    tokenizer.pop_and_check(BUY)
    # Renamed from `list`, which shadowed the builtin of the same name.
    items = parse_list(AT)
    store_name = parse_name([END_OF_LINE, FOR])
    token = tokenizer.top()
    if token == END_OF_LINE:
        statement = StoreASN(items, store_name)
    elif token == FOR:
        statement = PartCostASN(items, store_name)
    else:
        raise ParseError("Invalid token at line {0}.".format(
            tokenizer.get_line()))
    statement.parse()
    return statement
def __init__(self):
    """Cache the shared parsing collaborators on this instance.

    All three come from module-level accessors, so every instance
    presumably shares the same underlying objects — confirm against
    the accessor implementations.
    """
    # Token stream used while parsing.
    self.tokenizer = get_tokenizer()
    # Destination for produced output.
    self.output = get_output()
    # Symbol table for declared names.
    self.symbol_table = get_symbol_table()