def list_notes(output, data_node_host, dataset_id, fhir_store_id):
    """Fetch the clinical notes held in a FHIR store of an NLP data node.

    The notes are printed to stdout or written to ``output`` as JSON.
    """
    notes = client.list_notes(
        host=data_node_host,
        dataset_id=dataset_id,
        fhir_store_id=fhir_store_id,
    )
    # Materialize the (possibly lazy) result before serializing.
    utils.stdout_or_json(list(notes), output)
def list_datasets(data_node_host, output):
    """List the datasets available on an NLP data node.

    Args:
        data_node_host: Host of the NLP data node.
        output: Output destination; notes are printed to stdout or
            written to this path as JSON via ``utils.stdout_or_json``.
    """
    # Fix: the original docstring was copy-pasted from the annotation
    # listing command and wrongly described this as listing annotations.
    datasets = client.list_datasets(host=data_node_host)
    utils.stdout_or_json(list(datasets), output)
def list_annotations(data_node_host, dataset_id, annotation_store_id, output):
    """List annotations of a NLP data node annotation store.

    The annotations are printed to stdout or written to ``output`` as JSON.
    """
    store_annotations = client.list_annotations(
        host=data_node_host,
        dataset_id=dataset_id,
        annotation_store_id=annotation_store_id,
    )
    # Materialize the (possibly lazy) result before serializing.
    utils.stdout_or_json(list(store_annotations), output)
def get_annotation(data_node_host, dataset_id, annotation_store_id,
                   annotation_id, output):
    """Get annotation for a NLP data node dataset.

    The single annotation is converted to a dict and printed to stdout
    or written to ``output`` as JSON.
    """
    result = client.get_annotation(
        host=data_node_host,
        dataset_id=dataset_id,
        annotation_store_id=annotation_store_id,
        annotation_id=annotation_id,
    )
    utils.stdout_or_json(result.to_dict(), output)
def annotate_note(annotator_host, note_json, output, tool_type):
    """Annotate each note in a JSON file with the specified annotator.

    Every note's "note_name" key is removed before annotation and recorded
    in the result under ``annotationSource.resourceSource.name``.
    """
    with open(note_json, "r") as handle:
        notes = json.load(handle)

    results = []
    for note in notes:
        # Strip the name out of the payload sent to the annotator, but
        # keep it so the annotations can be traced back to their note.
        name = note.pop("note_name")
        annotation = client.annotate_note(
            host=annotator_host, note=note, tool_type=tool_type
        )
        annotation['annotationSource'] = {
            "resourceSource": {"name": name}
        }
        results.append(annotation)
    utils.stdout_or_json(results, output)
def evaluate_prediction(pred_filepath, gold_filepath, output, tool_type):
    """Evaluate the performance of a prediction file.

    Example prediction and goldstandard files are found in
    test/data/new_prediction.json and test/data/new_goldstandard.json
    respectively.

    NOTE(review): this file contains a later definition of
    ``evaluate_prediction`` that shadows this one at import time —
    confirm whether this version is dead code that should be removed.
    """
    eval_mapping = {
        "nlpsandbox:date-annotator": evaluation.DateEvaluation,
        "nlpsandbox:person-name-annotator": evaluation.PersonNameEvaluation,
        "nlpsandbox:physical-address-annotator": evaluation.PhysicalAddressEvaluation
    }
    # Unknown tool types raise KeyError here.
    evaluator_cls = eval_mapping[tool_type]
    evaluator = evaluator_cls()
    evaluator.convert_dict(pred_filepath, gold_filepath)
    utils.stdout_or_json(evaluator.eval(), output)
def evaluate_prediction(pred_filepath, gold_filepath, output, tool_type):
    """Evaluate the performance of a prediction file.

    Example prediction and goldstandard files are found in
    test/data/new_prediction.json and test/data/new_goldstandard.json
    respectively.

    Args:
        pred_filepath: Path to the system prediction JSON file.
        gold_filepath: Path to the goldstandard JSON file.
        output: Output destination; results are printed to stdout or
            written to this path as JSON via ``utils.stdout_or_json``.
        tool_type: Annotator tool type key, e.g. "nlpsandbox:date-annotator".

    Raises:
        KeyError: If ``tool_type`` is not a supported annotator type.
    """
    # Quote style normalized to double quotes for consistency.
    eval_mapping = {
        "nlpsandbox:date-annotator": evaluation.DateEvaluation,
        "nlpsandbox:person-name-annotator": evaluation.PersonNameEvaluation,
        "nlpsandbox:location-annotator": evaluation.LocationEvaluation,
        "nlpsandbox:id-annotator": evaluation.IdEvaluation,
        "nlpsandbox:contact-annotator": evaluation.ContactEvaluation,
        "nlpsandbox:covid-symptom-annotator": evaluation.CovidSymptomEvaluation
    }
    # Fix: removed leftover debug print of eval_mapping.
    evaluator = eval_mapping[tool_type]()
    evaluator.convert_dict(sys_file=pred_filepath, gs_file=gold_filepath)
    results = evaluator.eval()
    utils.stdout_or_json(results, output)
def get_tool(annotator_host, output):
    """Get tool information.

    The tool metadata is converted to a dict and printed to stdout or
    written to ``output`` as JSON.
    """
    tool_info = client.get_tool(host=annotator_host)
    utils.stdout_or_json(tool_info.to_dict(), output)
def store_dataset(data_node_host, dataset_id, output):
    """Create a dataset in the data node.

    The created dataset is converted to a dict and printed to stdout or
    written to ``output`` as JSON.
    """
    created = client.store_dataset(
        host=data_node_host, dataset_id=dataset_id
    )
    utils.stdout_or_json(created.to_dict(), output)