def match(tokenizer: Tokenizer):
    token_string = ''
    word_start_pointer = tokenizer.pointer_at()
    while not tokenizer.reached_end():
        if not tokenizer.peek().isdigit():
            break
        token_string += tokenizer.consume()
    return Integer(word_start_pointer, int(token_string)) if token_string else False
def start(tokenizer: Tokenizer):
    # Main matching loop: turn the whole character stream into a TokenStream.
    token_stream = TokenStream([])
    while not tokenizer.reached_end():
        word_start_pointer = tokenizer.pointer_at()
        token = Matcher.match_primary_keyword(tokenizer)
        # A bare word order (HD/DH) is not a token yet; it still needs an indicator keyword.
        if isinstance(token, HackersDelightMatcher.WordOrder):
            token = Matcher.match_indicator_keyword(token, word_start_pointer, tokenizer)
        if not token:
            # Nothing matched at this position: record the error and skip one character.
            Warnings.add_exception(Matcher.WhatTheHellManException(tokenizer.pointer_at()))
            tokenizer.consume()
        else:
            token_stream.append(token)
    return token_stream
def match(tokenizer: Tokenizer):
    # Match two basewords and report their order: HACK DEL -> HD, DEL HACK -> DH.
    tokenizer.take_snapshot()
    first_word = HackersDelightMatcher.match_baseword(tokenizer)
    second_word = HackersDelightMatcher.match_baseword(tokenizer) if first_word else False
    couple = (first_word, second_word)
    if couple == (Keywords.HACK, Keywords.DEL):
        tokenizer.purge_snapshot()
        return HackersDelightMatcher.WordOrder.HD
    if couple == (Keywords.DEL, Keywords.HACK):
        tokenizer.purge_snapshot()
        return HackersDelightMatcher.WordOrder.DH
    tokenizer.rollback_snapshot()
    return False
def match(tokenizer: Tokenizer, baseword_order: HackersDelightMatcher.WordOrder, pointer):
    # The COND indicator yields Condition after HD and JumpBack after DH.
    tokenizer.take_snapshot()
    if not tokenizer.reached_end() and tokenizer.consume() == Keywords.COND:
        if baseword_order == HackersDelightMatcher.WordOrder.HD:
            return Condition(pointer)
        elif baseword_order == HackersDelightMatcher.WordOrder.DH:
            return JumpBack(pointer)
    else:
        tokenizer.rollback_snapshot()
        return False
def match(tokenizer: Tokenizer, baseword_order: HackersDelightMatcher.WordOrder, pointer):
    # The IO_CHAR indicator yields OutputCharCell after HD and InputCharCell after DH.
    tokenizer.take_snapshot()
    if not tokenizer.reached_end() and tokenizer.consume() == Keywords.IO_CHAR:
        if baseword_order == HackersDelightMatcher.WordOrder.HD:
            return OutputCharCell(pointer)
        elif baseword_order == HackersDelightMatcher.WordOrder.DH:
            return InputCharCell(pointer)
    else:
        tokenizer.rollback_snapshot()
        return False
def match(tokenizer: Tokenizer, baseword_order: HackersDelightMatcher.WordOrder, pointer):
    # The MV_PTR indicator yields PointerRight after HD and PointerLeft after DH.
    tokenizer.take_snapshot()
    if not tokenizer.reached_end() and tokenizer.consume() == Keywords.MV_PTR:
        if baseword_order == HackersDelightMatcher.WordOrder.HD:
            return PointerRight(pointer)
        elif baseword_order == HackersDelightMatcher.WordOrder.DH:
            return PointerLeft(pointer)
    else:
        tokenizer.rollback_snapshot()
        return False
def match_baseword(tokenizer: Tokenizer) -> Keywords:
    # Longest-match scan for a single baseword (HACK, DEL or one of their auxiliary
    # spellings), using tokenizer snapshots to back out of a longer candidate that fails.
    token_word = ''
    matched_word = ''
    pointer = tokenizer.pointer_at()
    auxiliary_keywords = Keywords.HACK_AUX + Keywords.DEL_AUX
    all_keywords = (Keywords.HACK, Keywords.DEL) + auxiliary_keywords
    while not tokenizer.reached_end():
        one_matched = False
        token_word += tokenizer.peek()
        for word in all_keywords:
            if word.startswith(token_word):
                if not one_matched:
                    tokenizer.consume()
                    one_matched = True
                if word == token_word:
                    # Exact match: remember it and snapshot the position so a failed
                    # attempt at a longer keyword can be rolled back to this point.
                    if matched_word:
                        tokenizer.purge_snapshot()
                    tokenizer.take_snapshot()
                    matched_word = word
                    break
        else:
            if not one_matched and matched_word:
                # No keyword extends the current prefix: keep the last exact match.
                tokenizer.rollback_snapshot()
                break
            if not one_matched and not matched_word:
                return False
    else:
        # Input ran out before the match was settled.
        return False
    if matched_word in auxiliary_keywords:
        # Auxiliary spellings are accepted but normalised to HACK/DEL with a warning.
        auxiliary_word = matched_word
        matched_word = Keywords.HACK if matched_word in Keywords.HACK_AUX else Keywords.DEL
        Warnings.add_warning(
            HackersDelightMatcher.LazyPersonDetected(pointer, auxiliary_word, matched_word))
    return matched_word
def start(text: str):
    tokenizer = Tokenizer(''.join(text.split()))
    token_stream = Matcher.start(tokenizer)
    return token_stream
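# The matchers above only rely on a small Tokenizer surface: peek/consume/pointer_at/
# reached_end plus a snapshot stack (take_snapshot/rollback_snapshot/purge_snapshot).
# The following is a minimal sketch of such a tokenizer, assuming snapshots are just
# saved positions kept on a stack; the actual implementation is not shown here and
# may differ.
class Tokenizer:
    def __init__(self, text: str):
        self._text = text
        self._pos = 0
        self._snapshots = []  # stack of saved positions

    def pointer_at(self) -> int:
        return self._pos

    def reached_end(self) -> bool:
        return self._pos >= len(self._text)

    def peek(self) -> str:
        return self._text[self._pos]

    def consume(self) -> str:
        char = self._text[self._pos]
        self._pos += 1
        return char

    def take_snapshot(self):
        self._snapshots.append(self._pos)

    def rollback_snapshot(self):
        # Restore the most recently saved position and drop it from the stack.
        self._pos = self._snapshots.pop()

    def purge_snapshot(self):
        # Drop the most recently saved position without moving the pointer.
        self._snapshots.pop()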