import re
import string

from hypothesis import assume, given, strategies as st

# Function and syllable regex under test; the module name is an assumption.
from tokeniser import tokenise, syllable_re

# The strategy definitions below are assumptions: any strategies yielding a
# (possibly empty) syllable, a single punctuation character, a single
# whitespace character and a lowercase string exercise the same properties.
syllables = st.just('') | st.from_regex(syllable_re, fullmatch=True)
punctuation_chars = st.sampled_from('.,!?;:')
whitespace_chars = st.sampled_from(' \t\n')
lowercase_text = st.text(alphabet=string.ascii_lowercase)


@given(syllables, punctuation_chars, whitespace_chars, syllables)
def test_tokenise_phrase_with_punctuation(syllable1, punctuation, whitespace,
                                          syllable2):
    assume(syllable1 or syllable2)
    phrase = [syllable1, punctuation, whitespace, syllable2]
    assert tokenise(''.join(phrase)) == [word for word in phrase if word]


@given(syllables, whitespace_chars, syllables)
def test_tokenise_words(syllable1, whitespace, syllable2):
    assume(syllable1 or syllable2)
    words = [syllable1, whitespace, syllable2]
    assert tokenise(''.join(words)) == [word for word in words if word]


@given(syllables, punctuation_chars, syllables)
def test_tokenise_words_with_punctuation(syllable1, punctuation, syllable2):
    assume(syllable1 or syllable2)
    words = [syllable1, punctuation, syllable2]
    assert tokenise(''.join(words)) == [word for word in words if word]


@given(syllables)
def test_tokenise_single_syllable(syllable):
    assume(syllable)
    assert tokenise(syllable) == [syllable]


@given(syllables, syllables)
def test_tokenise_multiple_syllable_word(syllable1, syllable2):
    assume(syllable1 and syllable2)
    word = ''.join([syllable1, syllable2])
    # Skip pairs whose concatenation happens to form a single valid syllable.
    assume(re.fullmatch(syllable_re, word) is None)
    assert tokenise(word) == [syllable1, syllable2]


@given(punctuation_chars, whitespace_chars)
def test_tokenise_punctuation_then_whitespace(punctuation, whitespace):
    formatting = ''.join([punctuation, whitespace])
    assert tokenise(formatting) == [punctuation, whitespace]


@given(lowercase_text)
def test_tokenise_uppercase(lowercase):
    assume(lowercase)
    assert tokenise(lowercase.upper()) == tokenise(lowercase)


@given(punctuation_chars, punctuation_chars)
def test_tokenise_multiple_punctuation(punctuation1, punctuation2):
    punctuation = ''.join([punctuation1, punctuation2])
    assert tokenise(punctuation) == [punctuation1, punctuation2]


@given(punctuation_chars)
def test_tokenise_punctuation(punctuation):
    assert tokenise(punctuation) == [punctuation]


@given(whitespace_chars)
def test_tokenise_contiguous_whitespace(whitespace):
    assert tokenise(whitespace * 2) == [whitespace]


@given(whitespace_chars)
def test_tokenise_whitespace(whitespace):
    assert tokenise(whitespace) == [whitespace]


def test_tokenise_empty_seq():
    assert not tokenise('')
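

# Illustrative only: a minimal sketch of a tokeniser that would satisfy the
# properties above, assuming syllable_re matches exactly one syllable and
# that "punctuation" means any single non-word, non-space character.  This
# is not the real tokenise; it is named differently so it does not shadow
# the import above.
def sketch_tokenise(text):
    tokens = []
    # Scan the lowercased input as whitespace runs, single punctuation
    # characters, and maximal runs of word characters.
    for match in re.finditer(r'\s+|[^\w\s]|\w+', text.lower()):
        chunk = match.group()
        if chunk.isspace():
            tokens.append(chunk[0])  # collapse a whitespace run to one character
        elif re.fullmatch(r'\W', chunk):
            tokens.append(chunk)     # a single punctuation character
        else:
            # A run of word characters: split it into its syllables.
            tokens.extend(m.group() for m in re.finditer(syllable_re, chunk))
    return tokens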