def testInstantiation(self):
    """
    Checks that every exception class can be instantiated and exposes a
    non-empty message and an error code matching its class.
    """
    for exceptionClass in self._getExceptionClasses():
        # The validation-failure exceptions are too complicated to build
        # with placeholder arguments; construct their (invalid) protocol
        # objects explicitly instead.
        if exceptionClass == exceptions.RequestValidationFailureException:
            requestClass = protocol.SearchReadsRequest
            badRequest = requestClass()
            badRequest.start = -1
            constructorArgs = (
                protocol.toJsonDict(badRequest), requestClass)
        elif exceptionClass == \
                exceptions.ResponseValidationFailureException:
            responseClass = protocol.SearchReadsResponse
            badResponse = responseClass()
            badResponse.alignments.extend([protocol.ReadAlignment()])
            badResponse.alignments[0].alignment.mapping_quality = -1
            constructorArgs = (
                protocol.toJsonDict(badResponse), responseClass)
        else:
            # Generic case: one placeholder string per constructor
            # argument, excluding self.
            argCount = len(
                inspect.getargspec(exceptionClass.__init__).args) - 1
            constructorArgs = ['arg'] * argCount
        instance = exceptionClass(*constructorArgs)
        self.assertIsInstance(instance, exceptions.BaseServerException)
        message = instance.getMessage()
        self.assertIsInstance(message, basestring)
        self.assertGreater(len(message), 0)
        self.assertEqual(
            instance.getErrorCode(), exceptionClass.getErrorCode())
def insertReadGroupSet(self, readGroupSet):
    """
    Inserts the specified readGroupSet into this repository.
    """
    sql = """
        INSERT INTO ReadGroupSet (
            id, datasetId, referenceSetId, name, programs, stats,
            dataUrl, indexFile)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?);
    """
    # Nested protocol objects are stored as JSON dumps.
    programsJson = json.dumps([
        protocol.toJsonDict(program)
        for program in readGroupSet.getPrograms()])
    statsJson = json.dumps(protocol.toJsonDict(readGroupSet.getStats()))
    cursor = self._dbConnection.cursor()
    insertValues = (
        readGroupSet.getId(),
        readGroupSet.getParentContainer().getId(),
        readGroupSet.getReferenceSet().getId(),
        readGroupSet.getLocalId(),
        programsJson,
        statsJson,
        readGroupSet.getDataUrl(),
        readGroupSet.getIndexFile())
    try:
        cursor.execute(sql, insertValues)
    except sqlite3.IntegrityError:
        # A readGroupSet with this name already exists in the dataset.
        raise exceptions.DuplicateNameException(
            readGroupSet.getLocalId(),
            readGroupSet.getParentContainer().getLocalId())
    for readGroup in readGroupSet.getReadGroups():
        self.insertReadGroup(readGroup)
def populateFromJson(self, jsonString):
    """
    Populates this BioSample from the specified JSON string.

    :param jsonString: JSON serialisation of a protocol.BioSample.
    :return: self, allowing call chaining.
    :raises exceptions.InvalidJsonException: if jsonString cannot be
        parsed as a BioSample.
    """
    try:
        parsed = protocol.fromJson(jsonString, protocol.BioSample)
    except Exception:
        # Was a bare "except:", which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception so those still propagate.
        raise exceptions.InvalidJsonException(jsonString)
    self._created = parsed.created
    self._updated = parsed.updated
    self._description = parsed.description
    # Nested protocol messages are kept as plain JSON dictionaries.
    self._disease = protocol.toJsonDict(parsed.disease)
    self._individualId = parsed.individual_id
    self._info = {}
    for key in parsed.info:
        self._info[key] = {"values": protocol.toJsonDict(parsed.info[key])}
    return self
def populateFromJson(self, jsonString):
    """
    Populates this Individual from the specified JSON string.

    :param jsonString: JSON serialisation of a protocol.Individual.
    :return: self, allowing call chaining.
    :raises exceptions.InvalidJsonException: if jsonString cannot be
        parsed as an Individual.
    """
    # TODO validate
    try:
        parsed = protocol.fromJson(jsonString, protocol.Individual)
    except Exception:
        # Was a bare "except:", which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception so those still propagate.
        raise exceptions.InvalidJsonException(jsonString)
    self._created = parsed.created
    self._updated = parsed.updated
    self._description = parsed.description
    # Nested protocol messages are kept as plain JSON dictionaries.
    self._species = protocol.toJsonDict(parsed.species)
    self._sex = protocol.toJsonDict(parsed.sex)
    self._info = {}
    for key in parsed.info:
        self._info[key] = {"values": protocol.toJsonDict(parsed.info[key])}
    return self
def insertVariantSet(self, variantSet):
    """
    Inserts the specified variantSet into this repository.
    """
    sql = """
        INSERT INTO VariantSet (
            id, datasetId, referenceSetId, name, created, updated,
            metadata, dataUrlIndexMap)
        VALUES (?, ?, ?, ?, datetime('now'), datetime('now'), ?, ?);
    """
    # We cheat a little here with the VariantSetMetadata, and encode
    # these within the table as a JSON dump.  These should really be
    # stored in their own table.
    metadataJson = json.dumps([
        protocol.toJsonDict(metadata)
        for metadata in variantSet.getMetadata()])
    urlMapJson = json.dumps(variantSet.getReferenceToDataUrlIndexMap())
    cursor = self._dbConnection.cursor()
    insertValues = (
        variantSet.getId(),
        variantSet.getParentContainer().getId(),
        variantSet.getReferenceSet().getId(),
        variantSet.getLocalId(),
        metadataJson,
        urlMapJson)
    try:
        cursor.execute(sql, insertValues)
    except sqlite3.IntegrityError:
        # Name collision within the parent dataset.
        raise exceptions.DuplicateNameException(
            variantSet.getLocalId(),
            variantSet.getParentContainer().getLocalId())
    for callSet in variantSet.getCallSets():
        self.insertCallSet(callSet)
def sendListRequest(self, path, request):
    """
    POSTs the JSON form of the specified request to the given path and
    returns the raw response.
    """
    requestHeaders = {'Origin': self.exampleUrl}
    payload = protocol.toJsonDict(request)
    return self.app.post(path, data=payload, headers=requestHeaders)
def testToProtocolElement(self):
    """
    Round-trips a valid Individual through populateFromJson and
    toProtocolElement, and checks that malformed JSON is rejected.
    """
    dataset = datasets.Dataset('dataset1')
    sexTerm = protocol.OntologyTerm()
    sexTerm.term = "male genotypic sex"
    sexTerm.id = "PATO:0020001"
    sexTerm.source_name = "PATO"
    sexTerm.source_version = pb.string("2015-11-18")
    # Write out a valid input
    print(protocol.toJsonDict(sexTerm))
    validIndividual = protocol.Individual(
        name="test",
        created="2016-05-19T21:00:19Z",
        updated="2016-05-19T21:00:19Z",
        sex=sexTerm)
    validIndividual.info['test'].values.add().string_value = 'test-info'
    # Pass through protocol creation.
    individual = bioMetadata.Individual(dataset, "test")
    individual.populateFromJson(protocol.toJson(validIndividual))
    gaIndividual = individual.toProtocolElement()
    # The timestamps must survive the round trip.
    self.assertEqual(gaIndividual.created, validIndividual.created)
    self.assertEqual(gaIndividual.updated, validIndividual.updated)
    # Malformed JSON must raise InvalidJsonException.
    invalidIndividual = '{"bad:", "json"}'
    individual = bioMetadata.Individual(dataset, "test")
    self.assertRaises(
        exceptions.InvalidJsonException,
        individual.populateFromJson,
        invalidIndividual)
def testToProtocolElement(self):
    """
    Verifies Individual JSON round-tripping: a valid protocol object
    survives populateFromJson/toProtocolElement, invalid JSON raises.
    """
    dataset = datasets.Dataset('dataset1')
    ontologyTerm = protocol.OntologyTerm()
    ontologyTerm.term = "male genotypic sex"
    ontologyTerm.id = "PATO:0020001"
    ontologyTerm.source_name = "PATO"
    ontologyTerm.source_version = pb.string("2015-11-18")
    # Write out a valid input
    print(protocol.toJsonDict(ontologyTerm))
    goodIndividual = protocol.Individual(
        name="test",
        created="2016-05-19T21:00:19Z",
        updated="2016-05-19T21:00:19Z",
        sex=ontologyTerm)
    goodIndividual.info['test'].values.add().string_value = 'test-info'
    # Feed the serialised form through the datamodel object.
    modelIndividual = bioMetadata.Individual(dataset, "test")
    modelIndividual.populateFromJson(protocol.toJson(goodIndividual))
    gaIndividual = modelIndividual.toProtocolElement()
    # Timestamps must be preserved.
    self.assertEqual(gaIndividual.created, goodIndividual.created)
    self.assertEqual(gaIndividual.updated, goodIndividual.updated)
    # Broken JSON must be reported as InvalidJsonException.
    brokenJson = '{"bad:", "json"}'
    modelIndividual = bioMetadata.Individual(dataset, "test")
    self.assertRaises(
        exceptions.InvalidJsonException,
        modelIndividual.populateFromJson,
        brokenJson)
def _runListReferenceBasesPageRequest(self, id_, request):
    """
    Performs one HTTP GET against the list-reference-bases endpoint and
    returns the deserialised ListReferenceBasesResponse.
    """
    url = posixpath.join(
        self._urlPrefix, "references/{id}/bases".format(id=id_))
    queryParams = self._getHttpParameters()
    queryParams.update(protocol.toJsonDict(request))
    httpResponse = self._session.get(url, params=queryParams)
    self._checkResponseStatus(httpResponse)
    return self._deserializeResponse(
        httpResponse.text, protocol.ListReferenceBasesResponse)
def _run_list_reference_bases_page_request(self, id_, request):
    """
    Fetches a single page of reference bases over HTTP and returns the
    deserialised ListReferenceBasesResponse.
    """
    endpoint = "references/{id}/bases".format(id=id_)
    url = posixpath.join(self._url_prefix, endpoint)
    query_params = self._get_http_parameters()
    query_params.update(protocol.toJsonDict(request))
    http_response = self._session.get(url, params=query_params)
    self._check_response_status(http_response)
    return self._deserialize_response(
        http_response.text, protocol.ListReferenceBasesResponse)
def _run_list_reference_bases_page_request(self, id_, request):
    """
    Issues one GET for a page of reference bases and parses the result
    into a ListReferenceBasesResponse.
    """
    target_url = posixpath.join(
        self._url_prefix, "references/{id}/bases".format(id=id_))
    params = self._get_http_parameters()
    params.update(protocol.toJsonDict(request))
    page_response = self._session.get(target_url, params=params)
    self._check_response_status(page_response)
    return self._deserialize_response(
        page_response.text, protocol.ListReferenceBasesResponse)
def insertReadGroup(self, readGroup):
    """
    Inserts the specified readGroup into the DB.
    """
    sql = """
        INSERT INTO ReadGroup (
            id, readGroupSetId, name, predictedInsertSize, sampleId,
            description, stats, experiment, created, updated)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?,
                datetime('now'), datetime('now'));
    """
    # Stats and experiment are stored as JSON dumps.
    statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats()))
    experimentJson = json.dumps(
        protocol.toJsonDict(readGroup.getExperiment()))
    insertValues = (
        readGroup.getId(),
        readGroup.getParentContainer().getId(),
        readGroup.getLocalId(),
        readGroup.getPredictedInsertSize(),
        readGroup.getSampleId(),
        readGroup.getDescription(),
        statsJson,
        experimentJson)
    cursor = self._dbConnection.cursor()
    cursor.execute(sql, insertValues)
def verifyParsedOutputsEqual(
        self, clientIterator, cliCommand, cliArguments=""):
    """
    Verify that the parsed JSON of all the objects in the specified
    client iterator are equal to the parsed JSON from the specified
    CLI command; returns the number of objects compared.
    """
    expectedOutput = self.captureJsonOutput(cliCommand, cliArguments)
    actualOutput = [
        protocol.toJsonDict(gaObject) for gaObject in clientIterator]
    self.assertEqual(actualOutput, expectedOutput)
    return len(actualOutput)
def insertReadGroup(self, readGroup):
    """
    Inserts the specified readGroup into the DB.
    """
    sql = """
        INSERT INTO ReadGroup (
            id, readGroupSetId, name, predictedInsertSize, sampleName,
            description, stats, experiment, bioSampleId,
            created, updated)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?,
                datetime('now'), datetime('now'));
    """
    # Stats and experiment are serialised to JSON for storage.
    statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats()))
    experimentJson = json.dumps(
        protocol.toJsonDict(readGroup.getExperiment()))
    insertValues = (
        readGroup.getId(),
        readGroup.getParentContainer().getId(),
        readGroup.getLocalId(),
        readGroup.getPredictedInsertSize(),
        readGroup.getSampleName(),
        readGroup.getDescription(),
        statsJson,
        experimentJson,
        readGroup.getBioSampleId())
    cursor = self._dbConnection.cursor()
    cursor.execute(sql, insertValues)
def verifyParsedOutputsEqual(self, clientIterator, cliCommand,
                             cliArguments=""):
    """
    Verify that the parsed JSON of all the objects in the specified
    client iterator are equal to the parsed JSON from the specified
    CLI command; returns the number of objects compared.
    """
    cliJson = self.captureJsonOutput(cliCommand, cliArguments)
    clientJson = []
    for gaObject in clientIterator:
        clientJson.append(protocol.toJsonDict(gaObject))
    self.assertEqual(clientJson, cliJson)
    return len(clientJson)
def sendListReferenceBasesRequest(self, id_, request):
    """
    Sends a ListReferenceBasesRequest and parses the result into a
    ListReferenceBasesResponse.
    """
    path = '/references/{}/bases'.format(id_)
    queryString = protocol.toJsonDict(request)
    response = self.app.get(path, query_string=queryString)
    # The endpoint must answer successfully before we try to parse.
    self.assertEqual(response.status_code, 200)
    parsedResponse = protocol.fromJson(
        response.data, protocol.ListReferenceBasesResponse)
    self.assertIsInstance(
        parsedResponse, protocol.ListReferenceBasesResponse)
    return parsedResponse
def _run_list_reference_bases_page_request(self, id_, request):
    """
    Runs one page of a list-reference-bases request against the local
    backend and returns the deserialised ListReferenceBasesResponse.
    """
    request_args = protocol.toJsonDict(request)
    # We need to remove end from this dict if it's not specified because
    # of the way we're interacting with Flask and HTTP GET params.
    # TODO: This is a really nasty way of doing things; we really
    # should just have a request object and pass that around instead of
    # an arguments dictionary.
    # FIX: was "request.end is 0" — identity comparison on an int only
    # works by the accident of CPython's small-integer caching; use ==.
    if request.end == 0:
        del request_args["end"]
    if request.page_token == '':
        del request_args["pageToken"]
    response_json = self._backend.runListReferenceBases(id_, request_args)
    return self._deserialize_response(
        response_json, protocol.ListReferenceBasesResponse)
def _formatExternalIdentifiers(self, element, element_type):
    """
    Formats several external identifiers for query
    """
    # Accept either a protocol object or an already-converted dict.
    if not issubclass(element.__class__, dict):
        element = protocol.toJsonDict(element)
    elementClause = None
    identifiers = element['externalIdentifiers']
    if identifiers:
        formatted = [
            self._formatExternalIdentifier(_id, element_type)
            for _id in identifiers]
        elementClause = "({})".format(" || ".join(formatted))
    return elementClause
def _runListReferenceBasesPageRequest(self, id_, request):
    """
    Runs one page of a list-reference-bases request against the local
    backend and returns the deserialised ListReferenceBasesResponse.
    """
    requestArgs = protocol.toJsonDict(request)
    # We need to remove end from this dict if it's not specified because
    # of the way we're interacting with Flask and HTTP GET params.
    # TODO: This is a really nasty way of doing things; we really
    # should just have a request object and pass that around instead of
    # an arguments dictionary.
    # FIX: was "request.end is 0" — identity comparison on an int only
    # works by the accident of CPython's small-integer caching; use ==.
    if request.end == 0:
        del requestArgs["end"]
    if request.page_token == '':
        del requestArgs["pageToken"]
    responseJson = self._backend.runListReferenceBases(id_, requestArgs)
    return self._deserializeResponse(
        responseJson, protocol.ListReferenceBasesResponse)
def _formatExternalIdentifiers(self, element, element_type):
    """
    Formats several external identifiers for query
    """
    elementClause = None
    # Convert protocol objects to dict form; dicts pass through.
    if not issubclass(element.__class__, dict):
        element = protocol.toJsonDict(element)
    if element['externalIdentifiers']:
        clauses = []
        for externalId in element['externalIdentifiers']:
            clauses.append(
                self._formatExternalIdentifier(externalId, element_type))
        elementClause = "({})".format(" || ".join(clauses))
    return elementClause
def _formatOntologyTermObject(self, terms, element_type):
    """
    Formats the ontology term object for query
    """
    # A single term is treated as a one-element collection.
    if not isinstance(terms, collections.Iterable):
        terms = [terms]
    clauses = []
    for term in terms:
        if not issubclass(term.__class__, dict):
            term = protocol.toJsonDict(term)
        # Prefer the explicit id; otherwise resolve the term name to a
        # namespace URL.
        if term['id']:
            termUrl = term['id']
        else:
            termUrl = self._toNamespaceURL(term['term'])
        clauses.append('?{} = <{}> '.format(element_type, termUrl))
    elementClause = "({})".format(" || ".join(clauses))
    return elementClause
def testListReferenceBasesErrors(self):
    """
    Unknown reference ids must return 404 and out-of-bounds base
    ranges must return 416.
    """
    referenceSet = self.dataRepo.getReferenceSets()[0]
    for badId in self.getBadIds():
        # A syntactically bad id is not found.
        response = self.app.get('/references/{}/bases'.format(badId))
        self.assertEqual(response.status_code, 404)
        # Neither is a well-formed id for a nonexistent reference.
        missingReference = references.AbstractReference(
            referenceSet, badId)
        response = self.app.get(
            '/references/{}/bases'.format(missingReference.getId()))
        self.assertEqual(response.status_code, 404)
    # Ranges outside [0, length] are unsatisfiable.
    path = '/references/{}/bases'.format(self.reference.getId())
    length = self.reference.getLength()
    badRanges = [(-1, 0), (-1, -1), (length, 0), (0, length + 1)]
    for start, end in badRanges:
        rangeRequest = protocol.ListReferenceBasesRequest()
        rangeRequest.start, rangeRequest.end = start, end
        response = self.app.get(
            path, query_string=protocol.toJsonDict(rangeRequest))
        self.assertEqual(response.status_code, 416)
def _formatOntologyTermObject(self, terms, element_type):
    """
    Formats the ontology term object for query
    """
    elementClause = None
    # Normalise to a collection of terms.
    if not isinstance(terms, collections.Iterable):
        terms = [terms]
    elements = []
    for term in terms:
        # Protocol objects are converted to dicts; dicts pass through.
        if not issubclass(term.__class__, dict):
            term = protocol.toJsonDict(term)
        termUrl = term['id'] if term['id'] else \
            self._toNamespaceURL(term['term'])
        elements.append('?{} = <{}> '.format(element_type, termUrl))
    elementClause = "({})".format(" || ".join(elements))
    return elementClause
def insertVariantAnnotationSet(self, variantAnnotationSet):
    """
    Inserts the specified variantAnnotationSet into this repository.
    """
    sql = """
        INSERT INTO VariantAnnotationSet (
            id, variantSetId, ontologyId, name, analysis,
            annotationType)
        VALUES (?, ?, ?, ?, ?, ?);
    """
    # The analysis record is stored as a JSON dump.
    analysisJson = json.dumps(
        protocol.toJsonDict(variantAnnotationSet.getAnalysis()))
    insertValues = (
        variantAnnotationSet.getId(),
        variantAnnotationSet.getParentContainer().getId(),
        variantAnnotationSet.getOntology().getId(),
        variantAnnotationSet.getLocalId(),
        analysisJson,
        variantAnnotationSet.getAnnotationType())
    cursor = self._dbConnection.cursor()
    cursor.execute(sql, insertValues)
def insertVariantAnnotationSet(self, variantAnnotationSet):
    """
    Inserts the specified variantAnnotationSet into this repository.
    """
    sql = """
        INSERT INTO VariantAnnotationSet (
            id, variantSetId, ontologyId, name, analysis,
            annotationType, created, updated)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?);
    """
    # The analysis record is stored as a JSON dump.
    analysisJson = json.dumps(
        protocol.toJsonDict(variantAnnotationSet.getAnalysis()))
    insertValues = (
        variantAnnotationSet.getId(),
        variantAnnotationSet.getParentContainer().getId(),
        variantAnnotationSet.getOntology().getId(),
        variantAnnotationSet.getLocalId(),
        analysisJson,
        variantAnnotationSet.getAnnotationType(),
        variantAnnotationSet.getCreationTime(),
        variantAnnotationSet.getUpdatedTime())
    cursor = self._dbConnection.cursor()
    cursor.execute(sql, insertValues)
def pagedResults(geneName, soTerm, pageNumber):
    """
    Searches for variants in the named gene carrying a functional
    annotation with the given Sequence Ontology term, cross-references
    the calls against the phase3-release call sets, and returns one
    page of matches (with biosample info) as a Flask JSON response.

    NOTE(review): the statement nesting below was reconstructed from a
    whitespace-collapsed dump; confirm the pagination nesting against
    the original file.
    """
    resultCount = 0
    pageSize = 20  # number of matches per page
    pageCount = 0  # NOTE(review): assigned but never used
    searchOntologyTerm = str(soTerm)
    # Resolve the gene symbol to a feature so we have its coordinates.
    print("Looking for a gene")
    gene = geneBySymbol(geneName)
    print("Found {}".format(gene.name))
    # --- Functional annotations --------------------------------------
    # Search annotations restricted to the gene's range and the
    # requested effect term.
    print("searching for variant annotations")
    searchedVarAnns = c.search_variant_annotations(
        variant_annotation_set_id=functionalAnnotationSet.id,
        start=gene.start,
        end=gene.end,
        reference_name=gene.reference_name.replace('chr', ''),
        effects=[{'id': searchOntologyTerm}])
    variantIdList = []
    # Unpack the annotations: collect every annotated variant id, and
    # remember the human-readable name of the matching effect term.
    print("unpacking annotations and storing ID's")
    for annotation in searchedVarAnns:
        variantIdList.append(annotation.variant_id)
        for teff in annotation.transcript_effects:
            for effect in teff.effects:
                if effect.id == searchOntologyTerm:
                    term = effect.term
    variantList = []
    # Fetch the full variant record for each collected id; used below.
    print("populating variantList")
    for id_ in variantIdList:
        gotten = c.get_variant(id_)
        variantList.append(gotten)
    # --- phase3-release data -----------------------------------------
    # Locate the phase3-release variant set within the dataset.
    print("grabbing phase3-releases")
    for variantSet in c.search_variant_sets(dataset.id):
        if variantSet.name == "phase3-release":
            phaseVariantSet = variantSet
    # Biosamples carry population/family metadata for each individual.
    print("grabbing biosamples")
    bioSampleDict = {}
    # NOTE(review): bioSamplesList is never used and duplicates the
    # biosample search performed by the loop directly below.
    bioSamplesList = list(c.search_bio_samples(dataset.id))
    bsIdToBsam = {}
    for biosample in c.search_bio_samples(dataset.id):
        bsIdToBsam[biosample.id] = biosample
    allCallSets = list(c.search_call_sets(phaseVariantSet.id))
    # Collect all call set ids and map each call set to its biosample.
    print("grabbing callsets")
    callSetIds = []
    for callSet in allCallSets:
        callSetIds.append(str(callSet.id))
        bioSampleDict[callSet.id] = bsIdToBsam[callSet.bio_sample_id]
    # For each annotated variant, search the phase3 calls over the same
    # range; a call with genotype 1 on either allele is a match.  The
    # pagination is folded into these loops: matches are counted and
    # only those belonging to the requested page are appended.
    print("creating matchList")
    matchList = []
    phaseVariantList = []  # NOTE(review): assigned but never used
    nextPageNum = int(pageNumber)
    for variant in variantList:
        searchResults = c.search_variants(
            phaseVariantSet.id,
            start=variant.start,
            end=variant.end,
            reference_name=variant.reference_name,
            call_set_ids=callSetIds)
        for result in searchResults:
            # Stop scanning once the page is full.
            if len(matchList) == pageSize:
                break
            for call in result.calls:
                if call.genotype[0] == 1 or call.genotype[1] == 1:
                    # Human-friendly trace of where the match was found.
                    readableString = unicode(
                        call.call_set_name + " has " + str(term) +
                        " in gene " + geneName + " at position " +
                        str(variant.start) + " to " + str(variant.end))
                    print(readableString)
                    matchResult = {}
                    v = p.toJsonDict(result)
                    # Drop the bulky per-call data from the payload.
                    del v['calls']
                    matchResult['variant'] = v
                    matchResult['biosample'] = p.toJsonDict(
                        bioSampleDict[call.call_set_id])
                    resultCount += 1
                    # Page full: advance the next-page token, clearing
                    # it if it would wrap back to the requested page.
                    if len(matchList) == pageSize:
                        nextPageNum += 1
                        if nextPageNum == int(pageNumber):
                            nextPageNum = None
                        break
                    # Only keep matches belonging to the requested page.
                    if resultCount >= (pageSize * int(pageNumber)):
                        matchList.append(matchResult)
    # Return the page token, matches and query echo to the client.
    print("returning")
    return flask.jsonify({
        'next_page_token': nextPageNum,
        'matches': matchList,
        'gene': geneName,
        'term': term,
        'search_ontology_term': soTerm})
def testToGA4GH(self):
    """
    Checks that _toGA4GH converts a raw association dictionary (as
    produced by the RDF query layer) into a FeaturePhenotypeAssociation
    with the expected description and populated fields.
    """
    # Representative association row: predicate URIs mapping to object
    # URIs/literals for the feature, environment and phenotype.
    sample_associations = {
        u'environment_label': u'sunitinib',
        u'feature_label': u'RET M918T missense mutation',
        u'evidence_type': u'http://purl.obolibrary.org/obo/ECO_0000033',
        u'feature': {
            u'http://purl.obolibrary.org/obo/GENO_0000408':
                u'http://www.ncbi.nlm.nih.gov/gene/5979',
            u'http://purl.obolibrary.org/obo/GENO_reference_amino_acid':
                u'M',
            u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type':
                u'http://purl.obolibrary.org/obo/SO_0001059',
            u'http://biohackathon.org/resource/faldo#location':
                u'http://www.monarchinitiative.org/_918918UniProtKB:'
                'P07949#P07949-1Region',
            u'http://purl.obolibrary.org/obo/GENO_reference_nucleotide':
                u'T',
            u'http://purl.obolibrary.org/obo/'
            'GENO_results_in_amino_acid_change': u'T',
            u'http://purl.obolibrary.org/obo/RO_0002200':
                u'http://ohsu.edu/cgd/3774b1d2',
            u'http://purl.obolibrary.org/obo/RO_0002205':
                u'http://www.ncbi.nlm.nih.gov/CCDS/CcdsBrowse.cgi?'
                'REQUEST=CCDS&DATA=7200.1',
            u'http://purl.obolibrary.org/obo/GENO_altered_nucleotide':
                u'C',
            u'http://www.w3.org/2000/01/rdf-schema#label':
                u'RET M918T missense mutation',
            u'id': u'http://cancer.sanger.ac.uk/cosmic/mutation/'
                   'overview?id=965',
            u'http://www.w3.org/2002/07/owl#sameAs':
                u'http://www.ncbi.nlm.nih.gov/SNP/74799832',
        },
        u'evidence': u'http://ohsu.edu/cgd/sensitivity',
        u'environment': {
            u'http://purl.obolibrary.org/obo/RO_0002606':
                u'http://ohsu.edu/cgd/71fe9f0f',
            u'http://www.w3.org/2000/01/rdf-schema#subClassOf':
                u'http://purl.obolibrary.org/obo/CHEBI_23888',
            u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type':
                u'http://www.w3.org/2002/07/owl#Class',
            u'http://www.w3.org/2000/01/rdf-schema#label': u'sunitinib',
            u'id': u'http://www.drugbank.ca/drugs/DB01268',
        },
        u'sources': u'http://www.ncbi.nlm.nih.gov/pubmed/21470995|'
                    'http://www.ncbi.nlm.nih.gov/pubmed/21470995',
        u'phenotype': {
            u'http://purl.obolibrary.org/obo/BFO_0000159':
                u'http://ohsu.edu/cgd/sensitivity',
            u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type':
                u'http://purl.obolibrary.org/obo/DOID_3969',
            u'http://www.w3.org/2000/01/rdf-schema#label':
                u'Papillary thyroid carcinoma with sensitivity to therapy',
            u'id': u'http://ohsu.edu/cgd/30ebfd1a',
        },
        u'phenotype_label':
            u'Papillary thyroid carcinoma with sensitivity to therapy',
        u'id': u'http://ohsu.edu/cgd/fe484b5c',
        u'association': u'http://ohsu.edu/cgd/fe484b5c',
    }
    result = self.phenotypeAssocationSet._toGA4GH(sample_associations)
    self.assertEqual(
        result.__class__.__name__, 'FeaturePhenotypeAssociation')
    fpa_dict = protocol.toJsonDict(result)
    # The description is assembled from the labels and sources above.
    description = 'Association: genotype:[RET M918T missense mutation]' \
                  ' phenotype:[Papillary thyroid carcinoma with ' \
                  'sensitivity to therapy] environment:[sunitinib]' \
                  ' evidence:[sensitivity] publications:' \
                  '[http://www.ncbi.nlm.nih.gov/pubmed/21470995|' \
                  'http://www.ncbi.nlm.nih.gov/pubmed/21470995]'
    self.assertEqual(fpa_dict['description'], description)
    # Each association carries exactly one feature, evidence item and
    # environmental context.
    self.assertIn('featureIds', fpa_dict.keys())
    self.assertIn('evidence', fpa_dict.keys())
    self.assertIn('environmentalContexts', fpa_dict.keys())
    self.assertEqual(len(fpa_dict['featureIds']), 1)
    self.assertEqual(len(fpa_dict['evidence']), 1)
    self.assertEqual(len(fpa_dict['environmentalContexts']), 1)
def testToGA4GH(self):
    """
    Checks that _toGA4GH converts a raw association dictionary (as
    produced by the RDF query layer) into a FeaturePhenotypeAssociation
    with the expected description and populated fields.
    """
    # Representative association row: predicate URIs mapping to object
    # URIs/literals for the feature, environment and phenotype.
    sample_associations = {
        u'environment_label': u'sunitinib',
        u'feature_label': u'RET M918T missense mutation',
        u'evidence_type': u'http://purl.obolibrary.org/obo/ECO_0000033',
        u'feature': {
            u'http://purl.obolibrary.org/obo/GENO_0000408':
                u'http://www.ncbi.nlm.nih.gov/gene/5979',
            u'http://purl.obolibrary.org/obo/GENO_reference_amino_acid':
                u'M',
            u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type':
                u'http://purl.obolibrary.org/obo/SO_0001059',
            u'http://biohackathon.org/resource/faldo#location':
                u'http://www.monarchinitiative.org/_918918UniProtKB:'
                'P07949#P07949-1Region',
            u'http://purl.obolibrary.org/obo/GENO_reference_nucleotide':
                u'T',
            u'http://purl.obolibrary.org/obo/'
            'GENO_results_in_amino_acid_change': u'T',
            u'http://purl.obolibrary.org/obo/RO_0002200':
                u'http://ohsu.edu/cgd/3774b1d2',
            u'http://purl.obolibrary.org/obo/RO_0002205':
                u'http://www.ncbi.nlm.nih.gov/CCDS/CcdsBrowse.cgi?'
                'REQUEST=CCDS&DATA=7200.1',
            u'http://purl.obolibrary.org/obo/GENO_altered_nucleotide':
                u'C',
            u'http://www.w3.org/2000/01/rdf-schema#label':
                u'RET M918T missense mutation',
            u'id': u'http://cancer.sanger.ac.uk/cosmic/mutation/'
                   'overview?id=965',
            u'http://www.w3.org/2002/07/owl#sameAs':
                u'http://www.ncbi.nlm.nih.gov/SNP/74799832',
        },
        u'evidence': u'http://ohsu.edu/cgd/sensitivity',
        u'environment': {
            u'http://purl.obolibrary.org/obo/RO_0002606':
                u'http://ohsu.edu/cgd/71fe9f0f',
            u'http://www.w3.org/2000/01/rdf-schema#subClassOf':
                u'http://purl.obolibrary.org/obo/CHEBI_23888',
            u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type':
                u'http://www.w3.org/2002/07/owl#Class',
            u'http://www.w3.org/2000/01/rdf-schema#label': u'sunitinib',
            u'id': u'http://www.drugbank.ca/drugs/DB01268',
        },
        u'sources': u'http://www.ncbi.nlm.nih.gov/pubmed/21470995|'
                    'http://www.ncbi.nlm.nih.gov/pubmed/21470995',
        u'phenotype': {
            u'http://purl.obolibrary.org/obo/BFO_0000159':
                u'http://ohsu.edu/cgd/sensitivity',
            u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type':
                u'http://purl.obolibrary.org/obo/DOID_3969',
            u'http://www.w3.org/2000/01/rdf-schema#label':
                u'Papillary thyroid carcinoma with sensitivity to therapy',
            u'id': u'http://ohsu.edu/cgd/30ebfd1a',
        },
        u'phenotype_label':
            u'Papillary thyroid carcinoma with sensitivity to therapy',
        u'id': u'http://ohsu.edu/cgd/fe484b5c',
        u'association': u'http://ohsu.edu/cgd/fe484b5c',
    }
    result = self.phenotypeAssocationSet._toGA4GH(sample_associations)
    self.assertEqual(result.__class__.__name__,
                     'FeaturePhenotypeAssociation')
    fpa_dict = protocol.toJsonDict(result)
    # The description is assembled from the labels and sources above.
    description = 'Association: genotype:[RET M918T missense mutation]' \
                  ' phenotype:[Papillary thyroid carcinoma with ' \
                  'sensitivity to therapy] environment:[sunitinib]' \
                  ' evidence:[sensitivity] publications:' \
                  '[http://www.ncbi.nlm.nih.gov/pubmed/21470995|' \
                  'http://www.ncbi.nlm.nih.gov/pubmed/21470995]'
    self.assertEqual(fpa_dict['description'], description)
    # Each association carries exactly one feature, evidence item and
    # environmental context.
    self.assertIn('featureIds', fpa_dict.keys())
    self.assertIn('evidence', fpa_dict.keys())
    self.assertIn('environmentalContexts', fpa_dict.keys())
    self.assertEqual(len(fpa_dict['featureIds']), 1)
    self.assertEqual(len(fpa_dict['evidence']), 1)
    self.assertEqual(len(fpa_dict['environmentalContexts']), 1)