# NOTE: the import paths below are assumptions based on an
# azure-search-documents-style layout; adjust them to wherever these
# models and helpers actually live in the package under test.
from azure.search.documents.indexes.models import (
    PatternAnalyzer,
    PatternTokenizer,
    SearchIndex,
)
from azure.search.documents.indexes._generated.models import (
    PatternAnalyzer as _PatternAnalyzer,
    PatternTokenizer as _PatternTokenizer,
)
from azure.search.documents.indexes._utils import (
    delistize_flags_for_index,
    pack_search_index,
)


def test_pack_search_index():
    pattern_analyzer = PatternAnalyzer(
        name="test_analyzer",
        flags=["CANON_EQ"]
    )
    analyzers = [pattern_analyzer]
    pattern_tokenizer = PatternTokenizer(
        name="test_tokenizer",
        flags=["CANON_EQ"]
    )
    tokenizers = [pattern_tokenizer]
    index = SearchIndex(
        name="test",
        fields=None,
        analyzers=analyzers,
        tokenizers=tokenizers
    )
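    # pack_search_index is expected to replace each flag list with a single
    # string on the generated (private) model types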
    result = pack_search_index(index)
    assert isinstance(result.analyzers[0], _PatternAnalyzer)
    assert isinstance(result.analyzers[0].flags, str)
    assert result.analyzers[0].flags == "CANON_EQ"
    assert isinstance(result.tokenizers[0], _PatternTokenizer)
    assert isinstance(result.tokenizers[0].flags, str)
    assert result.tokenizers[0].flags == "CANON_EQ"


def test_multi_pack_search_index():
    pattern_analyzer = PatternAnalyzer(name="test_analyzer",
                                       flags=["CANON_EQ", "MULTILINE"])
    analyzers = [pattern_analyzer]
    pattern_tokenizer = PatternTokenizer(name="test_tokenizer",
                                         flags=["CANON_EQ", "MULTILINE"])
    tokenizers = [pattern_tokenizer]
    index = SearchIndex(name="test",
                        fields=None,
                        analyzers=analyzers,
                        tokenizers=tokenizers)
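    # multiple flags should collapse into one "|"-delimited string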
    result = pack_search_index(index)
    assert isinstance(result.analyzers[0], _PatternAnalyzer)
    assert isinstance(result.analyzers[0].flags, str)
    assert result.analyzers[0].flags == "CANON_EQ|MULTILINE"
    assert isinstance(result.tokenizers[0], _PatternTokenizer)
    assert isinstance(result.tokenizers[0].flags, str)
    assert result.tokenizers[0].flags == "CANON_EQ|MULTILINE"


def test_delistize_multi_flags_for_index():
    pattern_analyzer = PatternAnalyzer(name="test_analyzer",
                                       flags=["CANON_EQ", "MULTILINE"])
    analyzers = [pattern_analyzer]
    pattern_tokenizer = PatternTokenizer(name="test_tokenizer",
                                         flags=["CANON_EQ", "MULTILINE"])
    tokenizers = [pattern_tokenizer]
    index = SearchIndex(name="test",
                        fields=None,
                        analyzers=analyzers,
                        tokenizers=tokenizers)
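    # delistize_flags_for_index is expected to perform the same list-to-string
    # conversion on both analyzers and tokenizers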
    result = delistize_flags_for_index(index)
    assert isinstance(result.analyzers[0], _PatternAnalyzer)
    assert isinstance(result.analyzers[0].flags, str)
    assert result.analyzers[0].flags == "CANON_EQ|MULTILINE"
    assert isinstance(result.tokenizers[0], _PatternTokenizer)
    assert isinstance(result.tokenizers[0].flags, str)
    assert result.tokenizers[0].flags == "CANON_EQ|MULTILINE"
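

# For context, a minimal sketch of the conversion these tests encode. The
# helper below is hypothetical (not part of the library under test); it only
# illustrates the assumed list -> "|"-joined-string flag packing.
def _pack_pattern_flags(flags):
    # ["CANON_EQ", "MULTILINE"] -> "CANON_EQ|MULTILINE"; None or an empty
    # list stays None
    return "|".join(flags) if flags else None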