def test_entity_mention_ids(self):
    """A corrupted mention UUID must make entity-mention-id validation fail.

    Starts from a Communication that passes validation, overwrites one
    entity's first mention id with a UUID that matches no EntityMention,
    and checks that the failure is both returned and logged.
    """
    comm = read_test_comm()
    self.assertTrue(validate_communication(comm))
    self.assertTrue(validate_entity_mention_ids(comm))

    # Point the first entity's first mention id at a nonexistent mention.
    bogus_uuid = concrete.UUID(uuidString='BAD_ENTITY_MENTION_UUID')
    comm.entitySetList[0].entityList[0].mentionIdList[0] = bogus_uuid

    with LogCapture() as log_capture:
        self.assertFalse(validate_entity_mention_ids(comm))
        log_capture.check(
            ('root', 'ERROR',
             StringComparison(
                 r'.*invalid entityMentionId.*BAD_ENTITY_MENTION_UUID')))
def test_entity_mention_tokenization():
    """A bogus tokenizationId must make tokenization-id validation fail.

    Starts from a Communication that passes validation, overwrites the
    first entity mention's tokenizationId with a UUID that matches no
    Tokenization, and checks that the failure is returned and logged.
    """
    comm = read_test_comm()
    assert validate_communication(comm)
    assert validate_entity_mention_ids(comm)

    # Point the first mention's token ref at a nonexistent tokenization.
    bogus_uuid = concrete.UUID(uuidString='BAD_TOKENIZATION_UUID')
    comm.entityMentionSetList[0].mentionList[0].tokens.tokenizationId = (
        bogus_uuid)

    with LogCapture() as log_capture:
        assert not validate_entity_mention_tokenization_ids(comm)
        log_capture.check(
            ('root', 'ERROR',
             StringComparison(
                 r'.*invalid tokenizationId.*BAD_TOKENIZATION_UUID')))
def test_entity_mention_tokenization(self):
    """A bogus tokenizationId must make tokenization-id validation fail.

    Starts from a Communication that passes validation, overwrites the
    first entity mention's tokenizationId with a UUID that matches no
    Tokenization, and checks that the failure is returned and logged.
    """
    comm = read_test_comm()
    self.assertTrue(validate_communication(comm))
    self.assertTrue(validate_entity_mention_ids(comm))

    # Point the first mention's token ref at a nonexistent tokenization.
    bogus_uuid = concrete.UUID(uuidString='BAD_TOKENIZATION_UUID')
    comm.entityMentionSetList[0].mentionList[0].tokens.tokenizationId = (
        bogus_uuid)

    with LogCapture() as log_capture:
        self.assertFalse(validate_entity_mention_tokenization_ids(comm))
        log_capture.check(
            ('root', 'ERROR',
             StringComparison(
                 r'.*invalid tokenizationId.*BAD_TOKENIZATION_UUID')))
def test_entity_mention_ids():
    """A corrupted mention UUID must make entity-mention-id validation fail.

    Starts from a Communication that passes validation, overwrites one
    entity's first mention id with a UUID that matches no EntityMention,
    and checks that the failure is both returned and logged.
    """
    comm = read_test_comm()
    assert validate_communication(comm)
    assert validate_entity_mention_ids(comm)

    # Point the first entity's first mention id at a nonexistent mention.
    bogus_uuid = concrete.UUID(uuidString='BAD_ENTITY_MENTION_UUID')
    comm.entitySetList[0].entityList[0].mentionIdList[0] = bogus_uuid

    with LogCapture() as log_capture:
        assert not validate_entity_mention_ids(comm)
        log_capture.check(
            ('root', 'ERROR',
             StringComparison(
                 r'.*invalid entityMentionId.*BAD_ENTITY_MENTION_UUID')))
def test_repr_on_comm(self):
    """Verify that Communications can be converted to strings.

    Checks for the issue addressed in this commit:

        commit 0ee3317454543b63dc7a273d92e5720bb9210b03
        Author: Craig Harman <*****@*****.**>
        Date:   Tue Dec 16 13:08:44 2014 -0500

        Fixed infinite recursion bug in Tokenization.__repr__()

        The addition of an in-memory "backpointer" from a Tokenization
        to the Tokenization's enclosing Sentence inadvertently broke the
        (Thrift auto-generated) Tokenization.__repr__() function.
        Modified the function to ignore the backpointer when generating
        the string representation for a Tokenization.
    """
    comm = read_test_comm()
    # Use the idiomatic repr() builtin rather than calling the dunder
    # directly, and assert the result is actually a str so this smoke
    # test makes an explicit check instead of discarding the value.
    # (Before the fix referenced above, repr() recursed infinitely.)
    self.assertIsInstance(repr(comm), str)
def test_add_references():
    """Smoke test: add_references_to_communication completes without
    raising on a freshly-read test Communication."""
    communication = read_test_comm()
    add_references_to_communication(communication)
def test_add_references(self):
    """Smoke test: add_references_to_communication completes without
    raising on a freshly-read test Communication."""
    communication = read_test_comm()
    add_references_to_communication(communication)