def test_default_tokenizer():
    """The default tokenizer turns '*' into AnyToken and each other char into CharToken."""
    actual_tokens = DefaultTokenizer().tokenize(template="* hello")
    # One AnyToken for '*', then one CharToken per remaining character.
    expected_tokens = [AnyToken()] + [CharToken(char=c) for c in " hello"]
    assert actual_tokens == expected_tokens
def test_choice_token_find_nested():
    """A choice group nested inside another choice branch is parsed recursively."""
    result = ChoiceTokenFinder().find_in_template(
        template="[h[e|a]llo|hi]",
        tokenizer=DefaultTokenizer(),
    )
    # Inner group "[e|a]" inside the first branch of the outer choice.
    inner_choice = ChoiceToken(
        choices=[[CharToken(char="e")], [CharToken(char="a")]],
    )
    first_branch = (
        [CharToken(char="h"), inner_choice]
        + [CharToken(char=c) for c in "llo"]
    )
    second_branch = [CharToken(char=c) for c in "hi"]
    assert result.token == ChoiceToken(choices=[first_branch, second_branch])
    # The outer group spans the whole 14-character template.
    assert result.end == 14
def test_optional_token_match_with_message():
    """An optional token may consume zero or one matching character."""
    optional_h = OptionalToken(tokens=[CharToken(char="h")])
    endings = optional_h.match_with_message(
        message="hello",
        matcher=Matcher(tokenizer=DefaultTokenizer()),
    )
    # Both "skip the optional" (0) and "consume the 'h'" (1) are valid endings.
    assert endings == [0, 1]
def test_char_token_match_with_message_incorrect():
    """A CharToken yields no endings when the message starts with a different char."""
    mismatched_token = CharToken(char="a")
    endings = mismatched_token.match_with_message(
        message="hello",
        matcher=Matcher(tokenizer=DefaultTokenizer()),
    )
    assert endings == []
def test_any_token_match_with_message():
    """AnyToken can consume any non-empty prefix of the message."""
    endings = AnyToken().match_with_message(
        message="hello",
        matcher=Matcher(tokenizer=DefaultTokenizer()),
    )
    # Every position from 1 through len("hello") is a valid ending.
    assert endings == list(range(1, 6))
def test_optional_token_none():
    """The optional-token finder reports nothing when no '(' opens the template."""
    result = OptionalTokenFinder().find_in_template(
        template="hello",
        tokenizer=DefaultTokenizer(),
    )
    assert not result
def test_choice_token_find_without_finish():
    """An unterminated choice group (missing ']') produces no result."""
    result = ChoiceTokenFinder().find_in_template(
        template="[hello|hi",
        tokenizer=DefaultTokenizer(),
    )
    assert not result
def test_not_any_token():
    """The any-token finder returns None when the template has no '*'."""
    result = AnyTokenFinder().find_in_template(
        template="hello",
        tokenizer=DefaultTokenizer(),
    )
    assert result is None
def test_optional_token_without_finish_char():
    """An optional group whose outer ')' is missing produces no result."""
    result = OptionalTokenFinder().find_in_template(
        template="((h)ello",
        tokenizer=DefaultTokenizer(),
    )
    assert not result
def test_generation():
    """A generated message keeps the literal parts and expands '*' to something non-empty."""
    generator = Generator(tokenizer=DefaultTokenizer())
    message = generator.generate(template="he*o")
    # Literal prefix/suffix survive; '*' must add at least one character.
    assert message[0] == "h"
    assert message[1] == "e"
    assert message[-1] == "o"
    assert len(message) > 3
def test_any_token():
    """The any-token finder recognises a leading '*' and stops right after it."""
    result = AnyTokenFinder().find_in_template(
        template="* hello",
        tokenizer=DefaultTokenizer(),
    )
    assert result
    assert result.token == AnyToken()
    # '*' occupies a single character, so the match ends at index 1.
    assert result.end == 1
def test_char_token():
    """The char-token finder consumes exactly one literal character."""
    result = CharTokenFinder().find_in_template(
        template="hello",
        tokenizer=DefaultTokenizer(),
    )
    assert result
    assert result.token == CharToken(char="h")
    assert result.end == 1
def test_optional_token():
    """A parenthesised group becomes an OptionalToken wrapping its inner chars."""
    result = OptionalTokenFinder().find_in_template(
        template="(hello)",
        tokenizer=DefaultTokenizer(),
    )
    inner_tokens = [CharToken(char=c) for c in "hello"]
    assert result
    assert result.token == OptionalToken(tokens=inner_tokens)
    # The group spans "(hello)" — seven characters including both parentheses.
    assert result.end == 7
def test_choice_token_match_none():
    """A ChoiceToken yields no endings when no branch matches the message."""
    hello_branch = [CharToken(char=c) for c in "hello"]
    hi_branch = [CharToken(char=c) for c in "hi"]
    choice = ChoiceToken(choices=[hello_branch, hi_branch])
    endings = choice.match_with_message(
        message="eee",
        matcher=Matcher(tokenizer=DefaultTokenizer()),
    )
    assert endings == []
def __init__(self) -> None:
    """Initialise the matcher with the default tokenizer.

    Uses the Python 3 zero-argument ``super()`` form (equivalent to the
    legacy ``super(DefaultMatcher, self)``), matching the style used by
    the other ``__init__`` in this file.
    """
    super().__init__(tokenizer=DefaultTokenizer())
def __init__(self):
    """Initialise the parent with a freshly constructed default tokenizer."""
    super().__init__(tokenizer=DefaultTokenizer())