Example #1
    def match(tokenizer: Tokenizer):
        """Greedily consume a run of digits and return it as an Integer token."""
        token_string = ''
        word_start_pointer = tokenizer.pointer_at()

        # Consume characters for as long as they are digits; stop at the first
        # non-digit or at the end of the input.
        while not tokenizer.reached_end():
            if not tokenizer.peek().isdigit():
                break
            token_string += tokenizer.consume()

        # If at least one digit was read, build the token at the recorded start
        # position; otherwise report that nothing matched.
        return Integer(word_start_pointer,
                       int(token_string)) if token_string else False
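
Both examples lean on a project-specific `Tokenizer`, and the call sites above only reveal `pointer_at()`, `reached_end()`, `peek()`, and `consume()`, plus an `Integer` token type. The following is a minimal sketch of what such stand-ins might look like, purely to make the digit matcher readable in isolation; every name and behavior here is an assumption, not the project's actual implementation.

    from dataclasses import dataclass


    class Tokenizer:
        """Minimal character-stream tokenizer (assumed API, not the project's real class)."""

        def __init__(self, source: str):
            self._source = source
            self._pos = 0

        def pointer_at(self) -> int:
            # Current read position; the real project may return a richer object.
            return self._pos

        def reached_end(self) -> bool:
            return self._pos >= len(self._source)

        def peek(self) -> str:
            # Look at the next character without consuming it.
            return self._source[self._pos]

        def consume(self) -> str:
            # Return the next character and advance the pointer.
            char = self._source[self._pos]
            self._pos += 1
            return char


    @dataclass
    class Integer:
        """Hypothetical integer token: a source position plus the parsed value."""
        pointer: int
        value: int


    # With these stand-ins, match() above would consume '42' from '42+x',
    # return Integer(pointer=0, value=42), and leave the pointer on '+'.
    tokenizer = Tokenizer('42+x')
    assert tokenizer.peek() == '4'
    assert tokenizer.consume() == '4'
    assert not tokenizer.reached_end()
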
Example #2
    def match_baseword(tokenizer: Tokenizer) -> Keywords:
        """Match the longest keyword (HACK, DEL, or one of their auxiliary
        spellings), warning when an auxiliary spelling is used."""
        token_word = ''
        matched_word = ''
        pointer = tokenizer.pointer_at()

        auxiliary_keywords = Keywords.HACK_AUX + Keywords.DEL_AUX
        all_keywords = (Keywords.HACK, Keywords.DEL) + auxiliary_keywords

        while not tokenizer.reached_end():
            one_matched = False
            token_word += tokenizer.peek()
            for word in all_keywords:
                if word.startswith(token_word):
                    # The prefix still leads to at least one keyword; consume the
                    # character only once, no matter how many keywords it fits.
                    if not one_matched:
                        tokenizer.consume()
                    one_matched = True

                    if word == token_word:
                        # Complete keyword: remember it and snapshot the position
                        # so an attempt at a longer keyword can be undone later.
                        if matched_word:
                            tokenizer.purge_snapshot()
                        tokenizer.take_snapshot()
                        matched_word = word
                        break
            else:
                # The for loop ended without finding a complete keyword.
                if not one_matched and matched_word:
                    # Nothing starts with the prefix any more: fall back to the
                    # last complete match recorded in the snapshot.
                    tokenizer.rollback_snapshot()
                    break
                if not one_matched and not matched_word:
                    # The prefix matches no keyword at all: give up.
                    return False
        else:
            # Reached the end of the input inside the loop: report no match.
            return False

        if matched_word in auxiliary_keywords:
            # Normalize auxiliary spellings to their canonical keyword and warn,
            # pointing at where the lazy spelling started.
            auxiliary_word = matched_word
            matched_word = (Keywords.HACK
                            if matched_word in Keywords.HACK_AUX
                            else Keywords.DEL)
            Warnings.add_warning(
                HackersDelightMatcher.LazyPersonDetected(
                    pointer, auxiliary_word, matched_word))

        return matched_word
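
`match_baseword` extends the keyword prefix one character at a time and relies on the tokenizer's snapshot methods to commit to the last complete keyword when a longer candidate falls apart. A plausible reading of that snapshot API (save, discard, and restore a position), sketched as an extension of the `Tokenizer` stand-in above; the semantics are assumptions inferred from the call sites, not taken from the project.

    class SnapshotTokenizer(Tokenizer):
        """Adds the snapshot API that match_baseword expects (assumed semantics)."""

        def __init__(self, source: str):
            super().__init__(source)
            self._snapshots = []  # stack of saved positions

        def take_snapshot(self) -> None:
            # Remember the current position right after a complete keyword match.
            self._snapshots.append(self._pos)

        def purge_snapshot(self) -> None:
            # Discard the previous snapshot once a longer keyword superseded it.
            self._snapshots.pop()

        def rollback_snapshot(self) -> None:
            # Undo the characters consumed while chasing a longer keyword that
            # never completed, restoring the last committed position.
            self._pos = self._snapshots.pop()

Under these assumed semantics, matching a keyword such as HACK against input that merely starts like a longer auxiliary spelling would snapshot right after HACK, keep consuming while the longer candidate still fits, and roll the pointer back to the snapshot once it stops fitting, so the shorter complete keyword is what gets returned.
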