# Imports needed by these tests (module paths assumed from the ontobio package layout).
import json

from ontobio.io import gaference
from ontobio.model import association


def test_build_annotation_inferences():
    with open("tests/resources/test.inferences.json") as inferences_file:
        gaferences = json.load(inferences_file)
        inferences = gaference.build_annotation_inferences(gaferences)

    # Key: (relation to GO:0036064, mouse taxon, extension conjunction of two part_of units)
    akey = gaference.AnnotationKey(
        gaference.RelationTo(
            "http://purl.obolibrary.org/obo/BFO_0000050",
            "http://purl.obolibrary.org/obo/GO_0036064"),
        "http://purl.obolibrary.org/obo/NCBITaxon_10090",
        association.ExtensionConjunctions(
            frozenset([
                association.ExtensionUnit(
                    "http://purl.obolibrary.org/obo/BFO_0000050",
                    "http://purl.obolibrary.org/obo/EMAPA_17168"),
                association.ExtensionUnit(
                    "http://purl.obolibrary.org/obo/BFO_0000050",
                    "http://purl.obolibrary.org/obo/CL_0010009")
            ])))
    val = inferences[akey]

    expected = gaference.InferenceValue(True, False, [
        gaference.RelationTo(
            "http://purl.obolibrary.org/obo/BFO_0000050",
            "http://purl.obolibrary.org/obo/GO_0097458")
    ])

    assert val == expected
def test_produce_inference_produces_many_inferences():
    with open("tests/resources/test.inferences.json") as inference_file:
        gaferences = json.load(inference_file)
        inferences = gaference.build_annotation_inferences(gaferences)

    gaf_line = "MGI\tMGI:1345162\tAdam23\t\tGO:0099056\tMGI:MGI:4431144|PMID:20133599\tIDA\t\tC\ta disintegrin and metallopeptidase domain 23\tMDC3\tprotein\ttaxon:10090\t20180711\tSynGO\tpart_of(GO:0098978),part_of(UBERON:0000061),part_of(EMAPA:35405)|part_of(GO:0098978),part_of(UBERON:0000061),part_of(EMAPA:16894)\t"
    gaf = gaf_line.split("\t")

    results = gaference.produce_inferences(gaf, inferences)

    expected_lines = [
        [
            "MGI", "MGI:1345162", "Adam23", "", "GO:0098978",
            "MGI:MGI:4431144|PMID:20133599", "IDA", "", "C",
            "a disintegrin and metallopeptidase domain 23", "MDC3", "protein",
            "taxon:10090", "20180711", "SynGO", "", ""
        ],
        [
            "MGI", "MGI:1345162", "Adam23", "", "GO:0098978",
            "MGI:MGI:4431144|PMID:20133599", "IDA", "", "C",
            "a disintegrin and metallopeptidase domain 23", "MDC3", "protein",
            "taxon:10090", "20180711", "SynGO", "", ""
        ],
    ]

    assert len(results) == 2
    assert results[0].inferred_gafs[0] == expected_lines[0]
    assert results[1].inferred_gafs[0] == expected_lines[1]
def test_pombase_taxon_failure():
    with open("tests/resources/test.inferences.json") as inference_file:
        gaferences = json.load(inference_file)
        inferences = gaference.build_annotation_inferences(gaferences)

    gaf_line = "PomBase\tSPBC11B10.09\tcdc2\t\tGO:0007275\tPMID:21873635\tIBA\tPANTHER:PTN000623979|TAIR:locus:2099478\tP\tCyclin-dependent kinase 1\tUniProtKB:P04551|PTN000624043\tprotein\ttaxon:284812\t20170228\tGO_Central"
    gaf = gaf_line.split("\t")

    results = gaference.produce_inferences(gaf, inferences)

    assert len(results) == 1
    assert results[0].problem == gaference.ProblemType.TAXON
def test_taxon_check_failure():
    with open("tests/resources/test.inferences.json") as inference_file:
        gaferences = json.load(inference_file)
        inferences = gaference.build_annotation_inferences(gaferences)

    gaf_line = "MGI\tMGI:1924956\tAbcb5\t\tGO:0048058\tMGI:MGI:5585659|PMID:25030174\tIMP\t\tP\tATP-binding cassette, sub-family B (MDR/TAP), member 5\t9230106F14Rik\tprotein\ttaxon:10090\t20140729\tUniProt\t\t"
    gaf = gaf_line.split("\t")

    results = gaference.produce_inferences(gaf, inferences)

    assert len(results) == 1
    assert results[0].problem == gaference.ProblemType.TAXON
def test_extension_check_failure():
    with open("tests/resources/test.inferences.json") as inference_file:
        gaferences = json.load(inference_file)
        inferences = gaference.build_annotation_inferences(gaferences)

    gaf_line = "MGI\tMGI:109192\tActn2\t\tGO:0072659\tMGI:MGI:4366185|PMID:19815520\tIMP\t\tP\tactinin alpha 2\t1110008F24Rik\tprotein\ttaxon:10090\t20150506\tUniProt\tpart_of(CL:0002495),has_direct_input(UniProtKB:P58390)\t"
    gaf = gaf_line.split("\t")

    results = gaference.produce_inferences(gaf, inferences)

    assert len(results) == 1
    assert results[0].problem == gaference.ProblemType.EXTENSION
def test_produce_inference_produces_inferences():
    with open("tests/resources/test.inferences.json") as inference_file:
        gaferences = json.load(inference_file)
        inferences = gaference.build_annotation_inferences(gaferences)

    # Single extension conjunction should yield exactly one inferred GAF line (GO:0097458).
    gaf_line = "MGI\tMGI:2178217\tAkap9\t\tGO:0036064\tMGI:MGI:5303017|PMID:22031837\tIDA\t\tC\tA kinase (PRKA) anchor protein (yotiao) 9\t5730481H23Rik|AKAP450|G1-448-15|mei2-5|repro12\tprotein\ttaxon:10090\t20131226\tMGI\tpart_of(EMAPA:17168),part_of(CL:0010009)\t"
    gaf = gaf_line.split("\t")

    results = gaference.produce_inferences(gaf, inferences)

    expected_line = [
        "MGI", "MGI:2178217", "Akap9", "", "GO:0097458",
        "MGI:MGI:5303017|PMID:22031837", "IDA", "", "C",
        "A kinase (PRKA) anchor protein (yotiao) 9",
        "5730481H23Rik|AKAP450|G1-448-15|mei2-5|repro12", "protein",
        "taxon:10090", "20131226", "MGI", "", ""
    ]

    assert len(results) == 1
    assert len(results[0].inferred_gafs) == 1
    assert results[0].inferred_gafs[0] == expected_line
def main():
    """
    Wrapper for Assoc Parsing
    """
    parser = argparse.ArgumentParser(
        description='Wrapper for obographs assocmodel library'
                    """
                    By default, ontologies and assocs are cached locally and synced from a remote sparql endpoint
                    """,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-r', '--resource', type=str, required=False,
                        help='Name of ontology')
    parser.add_argument('-f', '--file', type=str, required=False,
                        help='Name of input file for associations - currently GAF is assumed')
    parser.add_argument('-F', '--format', type=str, required=False,
                        help='Format of assoc file. One of GAF, GPAD or HPOA')
    parser.add_argument('-o', '--outfile', type=str, required=False,
                        help='Path to output file')
    parser.add_argument("--report-md", type=str, required=False, dest="report_md",
                        help="Path to report markdown file")
    parser.add_argument("--report-json", type=str, required=False, dest="report_json",
                        help="Path to report JSON file")
    parser.add_argument('-t', '--to', type=str, required=False,
                        help='Output to (tree, dot, ...)')
    parser.add_argument("--filter-out", nargs="+", required=False, default=[], metavar="EVIDENCE",
                        help="List of any evidence codes to filter out of the GAF. E.g. --filter-out IEA IMP")
    parser.add_argument("--filtered-file", required=False, default=None, metavar="FILTERED_FILE",
                        help="File to write the filtered out evidence GAF to")
    parser.add_argument('-T', '--taxon', nargs='*', required=False,
                        help='valid taxon (NCBITaxon ID) - validate against this')
    parser.add_argument('--subject_prefix', nargs='*', required=False,
                        help='E.g. PomBase - validate against this')
    parser.add_argument('--object_prefix', nargs='*', required=False,
                        help='E.g. GO - validate against this')
    parser.add_argument("-I", "--gaferencer-file", type=argparse.FileType('r'), required=False,
                        help="Output from Gaferencer run on a set of GAF annotations")
    parser.add_argument('-v', '--verbosity', default=0, action='count',
                        help='Increase output verbosity')
    parser.add_argument("--allow_paint", required=False, action="store_const", const=True,
                        help="Allow IBAs in parser")
    parser.add_argument("-g", "--gpi", type=str, required=False, default=None,
                        help="GPI file")
    parser.add_argument("-l", "--rule", action="append", required=False, default=[], dest="rule_set",
                        help="Set of rules to be run. Default is no rules to be run, with the exception "
                             "of gorule-0000027 and gorule-0000020. See command line documentation in the "
                             "ontobio project or readthedocs for more information")

    subparsers = parser.add_subparsers(dest='subcommand', help='sub-command help')

    parser_n = subparsers.add_parser('validate', help='Validate associations')
    parser_n.set_defaults(function=validate_assocs)

    parser_n = subparsers.add_parser('filter', help='Filter associations')
    parser_n.set_defaults(function=filter_assocs)

    parser_n = subparsers.add_parser('convert', help='Convert associations')
    parser_n.set_defaults(function=convert_assocs)
    parser_n.add_argument('-t', '--to', type=str, required=True, choices=["GAF", "GPAD", "gaf", "gpad"],
                          help='Format to convert to')
    parser_n.add_argument("-n", "--format-version", dest="version", type=str, required=False, default=None,
                          help="Version for the file format. GAF default is 2.1, GPAD default is 1.2")

    parser_n = subparsers.add_parser('map2slim', help='Map to a subset/slim')
    parser_n.set_defaults(function=map2slim)
    parser_n.add_argument('-p', '--properties', nargs='*', type=str, default=['subClassOf', 'BFO:0000050'],
                          help='Properties')
    parser_n.add_argument('-s', '--subset', type=str, required=True,
                          help='subset (e.g. map2slim)')

    args = parser.parse_args()

    if args.verbosity >= 2:
        logging.basicConfig(level=logging.DEBUG)
    elif args.verbosity == 1:
        logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.WARNING)

    logging.info("Welcome!")

    # Ontology Factory
    ont = None
    if args.resource is not None:
        ofactory = OntologyFactory()
        logging.info("Creating ont object from: {} {}".format(args.resource, ofactory))
        ont = ofactory.create(args.resource)
        logging.info("ont: {}".format(ont))

    func = args.function

    # Upper case all evidence codes
    args.filter_out = [code.upper() for code in args.filter_out]

    # Build annotation inferences from a Gaferencer output file, if provided
    gaferences = None
    if args.gaferencer_file:
        gaferences = gaference.build_annotation_inferences(json.load(args.gaferencer_file))

    rule_set = args.rule_set
    if rule_set == ["all"]:
        rule_set = assocparser.RuleSet.ALL

    # set configuration
    filtered_evidence_file = open(args.filtered_file, "w") if args.filtered_file else None
    config = assocparser.AssocParserConfig(
        valid_taxa=args.taxon,
        ontology=ont,
        class_idspaces=args.object_prefix,
        entity_idspaces=args.subject_prefix,
        filter_out_evidence=args.filter_out,
        filtered_evidence_file=filtered_evidence_file,
        annotation_inferences=gaferences,
        paint=args.allow_paint,
        gpi_authority_path=args.gpi,
        rule_set=rule_set
    )

    p = None
    fmt = None
    if args.format is None:
        fmt = 'gaf'
    else:
        fmt = args.format.lower()

    # TODO: use a factory
    if fmt == 'gaf':
        p = GafParser(config=config, dataset=args.file)
    elif fmt == 'gpad':
        p = GpadParser(config=config)
    elif fmt == 'hpoa':
        p = HpoaParser(config=config)
    elif fmt == "gpi":
        p = entityparser.GpiParser()
        func = validate_entity

    outfh = None
    if args.outfile is not None:
        two_mb = 2097152
        outfh = open(args.outfile, "w", buffering=two_mb)

    func(ont, args.file, outfh, p, args)

    if filtered_evidence_file:
        filtered_evidence_file.close()

    if outfh is not None:
        outfh.close()

    if args.report_md is not None:
        report_md = open(args.report_md, "w")
        report_md.write(p.report.to_markdown())
        report_md.close()
    if args.report_json is not None:
        report_json = open(args.report_json, "w")
        report_json.write(json.dumps(p.report.to_report_json(), indent=4))
        report_json.close()
    if not (args.report_md or args.report_json):
        print(p.report.to_markdown())
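# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original source) of driving the same
# validation flow that main() wires up above directly from Python. The module
# paths are assumed from the ontobio package layout, and the input file name
# "mgi.gaf" is purely illustrative.
def example_programmatic_validation():
    from ontobio.io import assocparser
    from ontobio.io.gafparser import GafParser

    config = assocparser.AssocParserConfig()   # defaults; pass ontology, valid_taxa, etc. as needed
    p = GafParser(config=config)
    assocs = p.parse("mgi.gaf")                # hypothetical GAF input
    print(p.report.to_markdown())              # same report main() writes with --report-md
    return assocs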