def test_get_covered_text_sentences(sentences):
    """Covered text for sentence annotations matches the expected spans."""
    cas = Cas()
    cas.sofa_string = "Joe waited for the train . The train was late ."

    expected = [
        "Joe waited for the train .",
        "The train was late .",
    ]
    texts = [cas.get_covered_text(s) for s in sentences]

    assert texts == expected
def test_FeatureStructure_get_covered_text_tokens(tokens):
    """Each token's own get_covered_text() yields the underlying token text."""
    cas = Cas()
    cas.sofa_string = "Joe waited for the train . The train was late ."

    expected = [
        "Joe",
        "waited",
        "for",
        "the",
        "train",
        ".",
        "The",
        "train",
        "was",
        "late",
        ".",
    ]
    covered = [tok.get_covered_text() for tok in tokens]

    assert covered == expected
def test_get_view_finds_existing_view():
    """A created view can be retrieved, and its sofa is independent of the initial view."""
    cas = Cas()
    cas.create_view("testView")
    cas.sofa_string = "Initial"

    view = cas.get_view("testView")
    view.sofa_string = "testView42"

    sofa = view.get_sofa()
    attr.validate(sofa)

    assert sofa.sofaID == "testView"
    # Writing to the view must not clobber the initial view's sofa string.
    assert cas.sofa_string == "Initial"
    assert view.sofa_string == "testView42"
def test_add_annotation(small_typesystem_xml):
    """Annotations added to a CAS are returned by select() in order."""
    typesystem = load_typesystem(small_typesystem_xml)
    TokenType = typesystem.get_type("cassis.Token")

    cas = Cas(typesystem)
    cas.sofa_string = "Joe waited for the train ."

    # (begin, end, id, pos) for each token in the sofa string.
    spans = [
        (0, 3, "0", "NNP"),
        (4, 10, "1", "VBD"),
        (11, 14, "2", "IN"),
        (15, 18, "3", "DT"),
        (19, 24, "4", "NN"),
        (25, 26, "5", "."),
    ]
    tokens = [
        TokenType(begin=b, end=e, id=i, pos=p) for b, e, i, p in spans
    ]
    for token in tokens:
        cas.add_annotation(token)

    assert list(cas.select(TokenType.name)) == tokens
def test_sofa_string_can_be_set_and_read():
    """Round-trip: sofa_string reads back exactly what was assigned."""
    text = "I am a test sofa string!"

    cas = Cas()
    cas.sofa_string = text

    assert cas.sofa_string == text