def main(args=None):
    # Convert a CoNLL-U file into one pickled event per parsed document.
    parser = ArgumentParser()
    parser.add_argument('input_file')
    parser.add_argument('output_file')
    conf = parser.parse_args(args)
    with open(conf.input_file, 'r') as io:
        conllu_document = io.read()
    for document in read_into_documents(conllu_document):
        PickleSerializer.event_to_file(
            document.event,
            conf.output_file + '/' + document.event.event_id + '.pickle')
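# A minimal usage sketch (the file and directory names below are hypothetical, not part of
# the original script): main() accepts an argv-style list, so the converter can be driven
# programmatically as well as from the command line. Note that 'output_file' is used as a
# directory into which the per-document pickle files are written.
#
#     main(['corpus.conllu', '/tmp/pickled_events'])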
def test_concepts_performance(events_service, concepts_service, test_results):
    input_dir = Path(os.environ['BIOMEDICUS_TEST_DATA']) / 'concepts'
    recall = Accuracy(name='recall', mode='any', fields=['cui'])
    precision = Accuracy(name='precision', mode='any', fields=['cui'])
    with EventsClient(address=events_service) as client, \
            Pipeline(
                RemoteProcessor(processor_id='biomedicus-concepts',
                                address=concepts_service),
                LocalProcessor(Metrics(recall, tested='umls_concepts', target='gold_concepts'),
                               component_id='metrics'),
                LocalProcessor(Metrics(precision, tested='gold_concepts', target='umls_concepts'),
                               component_id='metrics_reverse'),
                events_client=client
            ) as pipeline:
        for test_file in input_dir.glob('**/*.pickle'):
            with PickleSerializer.file_to_event(test_file, client=client) as event:
                document = event.documents['plaintext']
                pipeline.run(document)
        print('Precision:', precision.value)
        print('Recall:', recall.value)
        timing_info = pipeline.processor_timer_stats('biomedicus-concepts').timing_info
        test_results['Concepts'] = {
            'Precision': precision.value,
            'Recall': recall.value,
            'Remote Call Duration': str(timing_info['remote_call'].mean),
            'Process Method Duration': str(timing_info['process_method'].mean)
        }
        assert recall.value > 0.6
def test_tnt_performance(events_service, pos_tags_service, test_results):
    input_dir = Path(os.environ['BIOMEDICUS_TEST_DATA']) / 'pos_tags'
    accuracy = Accuracy()
    with EventsClient(address=events_service) as client, Pipeline(
            RemoteProcessor(processor_id='biomedicus-tnt-tagger', address=pos_tags_service,
                            params={'token_index': 'gold_tags'}),
            LocalProcessor(Metrics(accuracy, tested='pos_tags', target='gold_tags'),
                           component_id='metrics'),
            events_client=client) as pipeline:
        for test_file in input_dir.glob('**/*.pickle'):
            with PickleSerializer.file_to_event(test_file, client=client) as event:
                document = event.documents['gold']
                results = pipeline.run(document)
                print('Accuracy for event - ', event.event_id, ':',
                      results.component_result('metrics').result_dict['accuracy'])
        print('Accuracy:', accuracy.value)
        pipeline.print_times()
        timing_info = pipeline.processor_timer_stats('biomedicus-tnt-tagger').timing_info
        test_results['TnT Pos Tagger'] = {
            'Accuracy': accuracy.value,
            'Remote Call Duration': str(timing_info['remote_call'].mean),
            'Process Method Duration': str(timing_info['process_method'].mean)
        }
        assert accuracy.value > 0.9
def test_dependencies(events_service, dependencies_service, test_results):
    test_dir = Path(os.environ['BIOMEDICUS_TEST_DATA']) / 'dependencies'
    uas = Accuracy('UAS', equivalence_test=uas_equal)
    las = Accuracy('LAS', equivalence_test=las_equal)
    with EventsClient(address=events_service) as client, \
            Pipeline(
                RemoteProcessor(processor_id='biomedicus-dependencies',
                                address=dependencies_service),
                LocalProcessor(Metrics(uas, las, tested='dependencies', target='gold_dependencies'),
                               component_id='accuracy', client=client)
            ) as pipeline:
        for test_file in test_dir.glob('**/*.pickle'):
            with PickleSerializer.file_to_event(test_file, client=client) as event:
                document = event.documents['plaintext']
                results = pipeline.run(document)
                accuracy_dict = results.component_result('accuracy').result_dict
                print('Results for document: UAS: {}. LAS: {}.'.format(
                    accuracy_dict['UAS'], accuracy_dict['LAS']))
        print('UAS:', uas.value)
        print('LAS:', las.value)
        timing_info = pipeline.processor_timer_stats('biomedicus-dependencies').timing_info
        test_results['biomedicus-dependencies'] = {
            'UAS': uas.value,
            'LAS': las.value,
            'Corpus': "MiPACQ converted to UD from PTB test set",
            'Remote Call Duration': str(timing_info['remote_call'].mean),
            'Process Method Duration': str(timing_info['process_method'].mean)
        }
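# uas_equal and las_equal above are equivalence tests defined elsewhere in the test module and
# are not shown here. The sketch below only illustrates the UAS/LAS distinction the test
# measures; the function names and the 'head'/'deprel' attribute access are assumptions, not
# the real helpers or label fields.
def _uas_equal_sketch(tested, target):
    # Unlabeled attachment: same token span attached to the same head.
    return (tested.location == target.location
            and tested.head.location == target.head.location)


def _las_equal_sketch(tested, target):
    # Labeled attachment: unlabeled attachment plus a matching dependency relation.
    return _uas_equal_sketch(tested, target) and tested.deprel == target.deprel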
def test_modification_detector_performance(events_service, modification_detector_service, test_results):
    input_dir = Path(os.environ['BIOMEDICUS_TEST_DATA']) / 'negation' / 'i2b2_2010'
    confusion = metrics.FirstTokenConfusion()
    metrics_processor = metrics.Metrics(confusion, tested='negated', target='i2b2concepts',
                                        target_filter=is_negated)
    with EventsClient(address=events_service) as client, Pipeline(
            RemoteProcessor('biomedicus-negation', address=modification_detector_service,
                            params={'terms_index': 'i2b2concepts'}),
            LocalProcessor(metrics_processor, component_id='metrics', client=client)
    ) as pipeline:
        for test_file in input_dir.glob('**/*.pickle'):
            with PickleSerializer.file_to_event(test_file, client=client) as event:
                document = event.documents['plaintext']
                results = pipeline.run(document)
                print('F1 for event - "{}": {:0.3f} - elapsed: {}'.format(
                    event.event_id,
                    results.component_result('metrics').result_dict['first_token_confusion']['f1'],
                    results.component_result('biomedicus-negation').timing_info['process_method']
                ))
        print('Overall Precision:', confusion.precision)
        print('Overall Recall:', confusion.recall)
        print('Overall F1:', confusion.f1)
        pipeline.print_times()
        timing_info = pipeline.processor_timer_stats('biomedicus-negation').timing_info
        test_results['biomedicus-modification'] = {
            'Gold Standard': "2010 i2b2-VA",
            'Precision': confusion.precision,
            'Recall': confusion.recall,
            'F1': confusion.f1,
            'Per-Document Mean Remote Call Duration': str(timing_info['remote_call'].mean),
            'Per-Document Mean Process Method Duration': str(timing_info['process_method'].mean)
        }
def test_pickle_deserialization():
    f = Path(__file__).parent / 'event.pickle'
    event = PickleSerializer.file_to_event(f)
    assert event.event_id == '12345'
    assert event.metadata['foo'] == 'bar'
    d = event.documents['plaintext']
    assert d.text == "The quick brown fox jumps over the lazy dog."
    assert len(d.get_label_indices_info()) == 3
    assert d.get_label_index("one") == [
        label(start_index=0, end_index=10, a="b"),
        label(start_index=12, end_index=25, a="c"),
        label(start_index=26, end_index=52, a="d"),
        label(start_index=53, end_index=85, a="e"),
    ]
    assert d.get_label_index("two") == [
        label(start_index=0, end_index=10, x=1),
        label(start_index=3, end_index=9, x=3),
        label(start_index=4, end_index=25, x=2),
        label(start_index=5, end_index=25, x=4),
    ]
    assert d.get_label_index("three") == [
        label(start_index=0, end_index=10, x=True),
        label(start_index=3, end_index=9, x=True),
        label(start_index=4, end_index=25, x=False),
        label(start_index=5, end_index=25, x=False),
    ]
def test_pickle_serializer():
    event = Event(event_id='1')
    event.metadata['foo'] = "bar"
    document = Document('plaintext', text='Some text.')
    event.add_document(document)
    one = label(start_index=0, end_index=5, x=10)
    two = label(start_index=6, end_index=10, x=15)
    document.add_labels('one', [one, two])
    document.add_labels('two', [
        label(start_index=0, end_index=25, a='b', b=one),
        label(start_index=26, end_index=42, a='c', b=two)
    ])
    document.add_labels('three', [
        label(start_index=0, end_index=10, foo=True),
        label(start_index=11, end_index=15, foo=False)
    ], distinct=True)
    with TemporaryFile('wb+') as tf:
        PickleSerializer.event_to_file(event, tf)
        tf.flush()
        tf.seek(0)
        e = PickleSerializer.file_to_event(tf)
    assert e.event_id == event.event_id
    assert e.metadata['foo'] == 'bar'
    d = e.documents['plaintext']
    assert d.text == document.text
    index_one = d.labels['one']
    assert index_one == [one, two]
    index_two = d.labels['two']
    assert index_two == [
        label(start_index=0, end_index=25, a='b', b=one),
        label(start_index=26, end_index=42, a='c', b=two)
    ]
    index_three = d.labels['three']
    assert index_three == [
        label(start_index=0, end_index=10, foo=True),
        label(start_index=11, end_index=15, foo=False)
    ]
def test_normalization(events_service, normalization_processor):
    with EventsClient(address=events_service) as client, \
            Pipeline(RemoteProcessor(processor_id='biomedicus_normalizer',
                                     address=normalization_processor)) as pipeline, \
            PickleSerializer.file_to_event(Path(__file__).parent / '97_95.pickle',
                                           client=client) as event:
        document = event.documents['plaintext']
        pipeline.run(document)
        for norm_form in document.get_label_index('norm_forms'):
            if norm_form.text == "according":
                assert norm_form.norm == "accord"
            if norm_form.text == "expressing":
                assert norm_form.norm == "express"
            if norm_form.text == "receiving":
                assert norm_form.norm == "receive"
            if norm_form.text == "days":
                assert norm_form.norm == "day"
def test_pickle_serializer():
    event = Event(event_id='1')
    event.metadata['foo'] = "bar"
    document = Document('plaintext', text='Some text.')
    event.add_document(document)
    document.add_labels('one', [
        label(start_index=0, end_index=5, x=10),
        label(start_index=6, end_index=10, x=15)
    ])
    document.add_labels('two', [
        label(start_index=0, end_index=25, a='b'),
        label(start_index=26, end_index=42, a='c')
    ])
    document.add_labels('three', [
        label(start_index=0, end_index=10, foo=True),
        label(start_index=11, end_index=15, foo=False)
    ], distinct=True)
    with NamedTemporaryFile('rb') as tf:
        PickleSerializer.event_to_file(event, tf.name)
        tf.flush()
        tf.seek(0)
        o = pickle.load(tf)
    assert o['event_id'] == '1'
    assert o['metadata']['foo'] == 'bar'
    d = o['documents']['plaintext']
    assert d['text'] == 'Some text.'
    assert len(d['label_indices']) == 3
    assert d['label_indices']['one'] == {
        'json_labels': [{
            'start_index': 0,
            'end_index': 5,
            'x': 10
        }, {
            'start_index': 6,
            'end_index': 10,
            'x': 15
        }],
        'distinct': False
    }
    assert d['label_indices']['two'] == {
        'json_labels': [{
            'start_index': 0,
            'end_index': 25,
            'a': 'b'
        }, {
            'start_index': 26,
            'end_index': 42,
            'a': 'c'
        }],
        'distinct': False
    }
    assert d['label_indices']['three'] == {
        'json_labels': [{
            'start_index': 0,
            'end_index': 10,
            'foo': True
        }, {
            'start_index': 11,
            'end_index': 15,
            'foo': False
        }],
        'distinct': True
    }