def test_get_pos_no_tagging(tokenization):
    """get_pos must raise when the tokenization carries no POS tagging."""
    # List comprehension rather than filter(): under Python 3, filter()
    # returns a one-shot iterator, so any later re-iteration of
    # tokenTaggingList would silently see it as empty and the fixture
    # would no longer behave like a list.
    tokenization.tokenTaggingList = [
        ttl for ttl in tokenization.tokenTaggingList
        if ttl.taggingType != 'POS'
    ]
    with raises(Exception):
        get_pos(tokenization)
def test_get_pos_non_unique_tagging(self):
    """get_pos must raise when a second POS tagging is present."""
    duplicate_tagging = TokenTagging(
        taggingType='POS',
        taggedTokenList=[
            TaggedToken(tokenIndex=0, tag='N'),
            TaggedToken(tokenIndex=1, tag='X'),
            TaggedToken(tokenIndex=2, tag='N'),
        ],
    )
    self.tokenization.tokenTaggingList.append(duplicate_tagging)
    with self.assertRaises(Exception):
        get_pos(self.tokenization)
def test_get_pos_non_unique_tagging(tokenization):
    """get_pos must raise when a second POS tagging is appended."""
    extra_tagging = TokenTagging(
        taggingType='POS',
        taggedTokenList=[
            TaggedToken(tokenIndex=0, tag='N'),
            TaggedToken(tokenIndex=1, tag='X'),
            TaggedToken(tokenIndex=2, tag='N'),
        ],
    )
    tokenization.tokenTaggingList.append(extra_tagging)
    with raises(Exception):
        get_pos(tokenization)
def test_get_pos_no_tagging(self):
    """get_pos must raise when the tokenization carries no POS tagging."""
    # List comprehension rather than filter(): in Python 3 filter()
    # yields a one-shot iterator; if anything iterates the list before
    # get_pos does, get_pos would see it as empty for the wrong reason
    # and the test would be vacuous.
    self.tokenization.tokenTaggingList = [
        ttl for ttl in self.tokenization.tokenTaggingList
        if ttl.taggingType != 'POS'
    ]
    with self.assertRaises(Exception):
        get_pos(self.tokenization)
def test_get_pos(self):
    """get_pos returns the POS tagged tokens with expected tags and indices."""
    # Materialize with list comprehensions: under Python 3, map() returns
    # an iterator, and an iterator never compares equal to a list, so the
    # original assertEqual(list, map(...)) could not pass.
    self.assertEqual(
        ['N', 'N', 'X'],
        [t.tag for t in get_pos(self.tokenization)],
    )
    self.assertEqual(
        [0, 1, 2],
        [t.tokenIndex for t in get_pos(self.tokenization)],
    )
def test_get_pos(tokenization):
    """get_pos returns the POS tagged tokens with expected tags and indices."""
    # List comprehensions instead of map(): in Python 3 a map object is
    # never == to a list, so the original asserts always failed.
    assert ['N', 'N', 'X'] == [t.tag for t in get_pos(tokenization)]
    assert [0, 1, 2] == [t.tokenIndex for t in get_pos(tokenization)]