def get_valid_markers_by_symbol(self, symbol):
    """
    Retrieve list of valid markers by symbol

    Restricted to mouse markers with official or withdrawn status.
    Returns the DAO search result with items converted to
    SmallMarker domain objects, sorted via ValidMarkerSortHelper.
    """
    search_query = SearchQuery()
    search_query.set_param('symbol', symbol)

    # restrict to official and withdrawn
    search_query.set_param('_marker_status_keys', [1, 2])
    # only search mouse
    search_query.set_param('_organism_keys', [1])

    search_results = self.marker_dao.search(search_query)
    markers = search_results.items

    # pre-load attributes used by the sort and conversion below
    batchLoadAttribute(markers, 'current_markers')
    batchLoadAttribute(markers, 'featuretype_vocterms')

    ValidMarkerSortHelper().sort(markers)

    # convert db models to domain objects
    search_results.items = convert_models(markers, SmallMarker)

    return search_results
def buildDagTreeFromRoot(rootnode, batchloadOn=True):
    """
    Build a single DAG tree view rooted at ``rootnode``.

    Returns a dict {"name": <dag name>, "root": <TreeNode>}.
    When ``batchloadOn`` is set, child nodes and vocterms are
    batch loaded to avoid per-node queries.
    """
    tree_root = TreeNode(rootnode)

    # every dag node discovered during the walk, so vocterms can be
    # batch loaded in one query at the end
    discovered = set()

    # depth-first expansion of children
    pending = [tree_root]
    while pending:
        current = pending.pop()
        # fetch and attach the immediate children of this node
        loadFirstChildren(current, discovered, batchloadOn)
        pending.extend(current.children or [])

    # one query for all term objects of every found node
    if batchloadOn:
        batchLoadAttribute(list(discovered), "vocterm")

    return {"name": rootnode.dag_name, "root": tree_root}
def reportIndex():
    """Render the report index page, newest reports first."""
    all_reports = Report.query.order_by(Report.created.desc()).all()
    # batch load the label relationship used in the listing
    batchLoadAttribute(all_reports, 'labels')
    return render_template("report/index.html", reports=all_reports)
def searchExperiments(marker_id=None, refs_id=None, expttypes=None, limit=None):
    """
    Perform search for MappingExperiment records by various parameters
        e.g. marker_id, _refs_id
    ordered by MappingExperiment.expttype, _refs_key, chromosome

    marker_id -- marker MGI ID, matched via the experiment's marker assocs
    refs_id -- reference J-number ID
    expttypes -- iterable of experiment type names to restrict to
    limit -- optional maximum number of rows
    """
    query = MappingExperiment.query

    if marker_id:
        # EXISTS sub-query: experiment linked to a marker with this MGI ID
        marker_accession = db.aliased(Accession)
        sub_experiment = db.aliased(MappingExperiment)
        sq = db.session.query(sub_experiment) \
            .join(sub_experiment.marker_assocs) \
            .join(ExperimentMarkerAssoc.marker) \
            .join(marker_accession, Marker.mgiid_object) \
            .filter(marker_accession.accid==marker_id) \
            .filter(sub_experiment._expt_key==MappingExperiment._expt_key) \
            .correlate(MappingExperiment)
        query = query.filter(sq.exists())

    if refs_id:
        # EXISTS sub-query: experiment's reference has this J-number
        refs_accession = db.aliased(Accession)
        sub_experiment = db.aliased(MappingExperiment)
        sq = db.session.query(sub_experiment) \
            .join(sub_experiment.reference) \
            .join(refs_accession, Reference.jnumid_object) \
            .filter(refs_accession.accid==refs_id) \
            .filter(sub_experiment._expt_key==MappingExperiment._expt_key) \
            .correlate(MappingExperiment)
        query = query.filter(sq.exists())

    if expttypes:
        # BUGFIX: previously this filtered on the constant
        # MappingExperiment.VALID_EXPTTYPES, silently ignoring the
        # caller-supplied expttypes argument
        query = query.filter(MappingExperiment.expttype.in_(expttypes))

    query = query.order_by(MappingExperiment.expttype,
                           MappingExperiment._refs_key,
                           MappingExperiment.chromosome)

    if limit:
        query = query.limit(limit)

    experiments = query.all()

    # load any necessary data for summary
    batchLoadAttribute(experiments, 'reference')

    return experiments
def buildDagTreeFromRoot(rootnode, batchloadOn=True):
    """
    Build a single DAG tree view with root as rootnode.

    rootnode -- DAG node used as the root of the tree
    batchloadOn -- when True, batch load child nodes and vocterms
    Returns a dict {"name": <dag name>, "root": <TreeNode>}.
    """
    root = TreeNode(rootnode)
    dagtree = {"name": rootnode.dag_name, "root": root}

    # track all the found dag nodes for batch loading
    # vocterms later on
    foundNodes = set()

    # walk the tree depth-first, expanding children as we go
    stack = [root]
    while stack:
        node = stack.pop()

        # get immediate children
        # batch load nodes of each fetched edge
        loadFirstChildren(node, foundNodes, batchloadOn)

        if node.children:
            for child in node.children:
                stack.append(child)

    # batch load all the term objects for every found node
    if batchloadOn:
        batchLoadAttribute(list(foundNodes), "vocterm")

    return dagtree
def search_emapa_terms(self, search_query): """ Search specifically for EMAPA vocterm objects """ # don't search obsolete terms by default if not search_query.has_valid_param('isobsolete'): search_query.set_param('isobsolete', 0) search_query.set_param('vocab_name', 'EMAPA') search_query.sorts = ["term"] search_result = self.vocterm_dao.search(search_query) terms = search_result.items # batch load necessary attributes batchLoadAttribute(terms, "emapa_info") batchLoadAttribute(terms, "synonyms") # add term highlights if termSearch if search_query.has_valid_param('termSearch'): self._add_emapa_highlights(terms, search_query.get_value('termSearch')) search_result.items = convert_models(terms, EMAPATermDomain) return search_result
def _prepProbe(probe):
    """Batch load the attributes a probe detail page might need."""
    if not probe:
        return
    # add the has_references / markers existence attributes
    for attribute in ('markers', 'references'):
        batchLoadAttribute([probe], attribute)
def searchExperiments(marker_id=None, refs_id=None, expttypes=None, limit=None):
    """
    Perform search for MappingExperiment records by various parameters
        e.g. marker_id, _refs_id
    ordered by MappingExperiment.expttype, _refs_key, chromosome

    marker_id -- marker MGI ID, matched via the experiment's marker assocs
    refs_id -- reference J-number ID
    expttypes -- iterable of experiment type names to restrict to
    limit -- optional maximum number of rows
    """
    query = MappingExperiment.query

    if marker_id:
        # EXISTS sub-query: experiment linked to a marker with this MGI ID
        marker_accession = db.aliased(Accession)
        sub_experiment = db.aliased(MappingExperiment)
        sq = db.session.query(sub_experiment) \
            .join(sub_experiment.marker_assocs) \
            .join(ExperimentMarkerAssoc.marker) \
            .join(marker_accession, Marker.mgiid_object) \
            .filter(marker_accession.accid==marker_id) \
            .filter(sub_experiment._expt_key==MappingExperiment._expt_key) \
            .correlate(MappingExperiment)
        query = query.filter(sq.exists())

    if refs_id:
        # EXISTS sub-query: experiment's reference has this J-number
        refs_accession = db.aliased(Accession)
        sub_experiment = db.aliased(MappingExperiment)
        sq = db.session.query(sub_experiment) \
            .join(sub_experiment.reference) \
            .join(refs_accession, Reference.jnumid_object) \
            .filter(refs_accession.accid==refs_id) \
            .filter(sub_experiment._expt_key==MappingExperiment._expt_key) \
            .correlate(MappingExperiment)
        query = query.filter(sq.exists())

    if expttypes:
        # BUGFIX: previously this filtered on the constant
        # MappingExperiment.VALID_EXPTTYPES, silently ignoring the
        # caller-supplied expttypes argument
        query = query.filter(MappingExperiment.expttype.in_(expttypes))

    query = query.order_by(MappingExperiment.expttype,
                           MappingExperiment._refs_key,
                           MappingExperiment.chromosome)

    if limit:
        query = query.limit(limit)

    experiments = query.all()

    # load any necessary data for summary
    batchLoadAttribute(experiments, 'reference')

    return experiments
def loadFirstChildren(root, foundNodes, batchloadOn):
    """
    Attach the immediate children of ``root`` as TreeNodes.

    Each child dag node is also added to ``foundNodes`` so the caller
    can batch load vocterms afterwards.
    """
    edges = root.dagnode.child_edges
    if batchloadOn:
        # fetch the child node of every edge in one query
        batchLoadAttribute(edges, "child_node")
    for child_edge in edges:
        dag_child = child_edge.child_node
        tree_child = TreeNode(dag_child)
        tree_child.parent = root
        tree_child.edge_label = child_edge.label
        root.children.append(tree_child)
        foundNodes.add(dag_child)
def searchAlleles(refs_id=None, marker_id=None, assay_id=None, limit=None):
    """
    Perform search for Alleles

    refs_id -- reference J-number ID (matched via explicit references)
    marker_id -- marker MGI ID
    assay_id -- assay MGI ID
    limit -- optional maximum number of rows
    Ordered by transmission (desc), status, symbol.
    """
    query = Allele.query

    if assay_id:
        # allele associated with an assay carrying this MGI ID
        query = query.filter(Allele.assays.any(Assay.mgiid==assay_id))

    if marker_id:
        # join to the allele's marker and match its MGI ID
        query = query.join(Allele.marker)
        marker_accession = db.aliased(Accession)
        query = query.join(marker_accession, Marker.mgiid_object)
        query = query.filter(marker_accession.accid==marker_id)

    if refs_id:
        # EXISTS sub-query: allele explicitly referenced by this J-number
        jnum_accession = db.aliased(Accession)
        sub_allele = db.aliased(Allele)
        sq = db.session.query(sub_allele) \
            .join(sub_allele.explicit_references) \
            .join(jnum_accession, Reference.jnumid_object) \
            .filter(jnum_accession.accid==refs_id) \
            .filter(sub_allele._allele_key==Allele._allele_key) \
            .correlate(Allele)
        query = query.filter(sq.exists())

    query = query.order_by(Allele.transmission.desc(),
                           Allele.status,
                           Allele.symbol)

    if limit:
        query = query.limit(limit)

    alleles = query.all()

    # load attributes needed on summary
    batchLoadAttribute(alleles, "mp_annots")
    batchLoadAttribute(alleles, "disease_annots")
    batchLoadAttribute(alleles, "disease_annots_DO")
    batchLoadAttribute(alleles, "subtypes")
    batchLoadAttribute(alleles, "synonyms")

    return alleles
def renderImageDetail(image):
    """Render the image detail page for the given image."""
    # batch load some expression info for the image detail
    pane_attributes = (
        'insituresults',
        'insituresults.specimen',
        'insituresults.specimen.assay',
        'insituresults.specimen.assay.marker',
        'gel_assays',
    )
    for attribute in pane_attributes:
        batchLoadAttribute(image.imagepanes, attribute)
    #batchLoadAttribute(image.imagepanes, 'gel_assay.marker')

    # get reference for image
    reference = reference_service.get_by_key(image._refs_key)

    return render_template('detail/image_detail.html',
                           image=image,
                           reference=reference)
def loadSiblings(originalNode, currentNode, foundNodes, batchloadOn):
    """
    Rebuild currentNode's children from its dag child edges so that
    originalNode's siblings appear, keeping originalNode's existing
    subtree in place at its original position.
    """
    # expand first parent to load siblings
    if batchloadOn:
        batchLoadAttribute(currentNode.dagnode.child_edges, "child_node")

    currentNode.children = []
    for edge in currentNode.dagnode.child_edges:
        child_node = edge.child_node
        ctree_node = TreeNode(child_node)

        # preserve original node structure in order
        if ctree_node._term_key == originalNode._term_key:
            ctree_node = originalNode

        ctree_node.parent = currentNode
        ctree_node.edge_label = edge.label
        currentNode.children.append(ctree_node)
        foundNodes.add(child_node)
def searchAntibodies(marker_id=None, refs_id=None):
    """
    Perform search for Antibody records by various parameters
        e.g. marker_id, _refs_id
    ordered by Marker.symbol, Antibody.antibodyname, Antibody.mgiid
    """
    query = Antibody.query

    if refs_id:
        # EXISTS sub-query: antibody referenced by this J-number
        reference_accession = db.aliased(Accession)
        sub_antibody = db.aliased(Antibody)
        sq = db.session.query(sub_antibody) \
            .join(sub_antibody.references) \
            .join(reference_accession, Reference.jnumid_object) \
            .filter(reference_accession.accid==refs_id) \
            .filter(sub_antibody._antibody_key==Antibody._antibody_key) \
            .correlate(Antibody)
        query = query.filter(sq.exists())

    if marker_id:
        # EXISTS sub-query: antibody associated with this marker MGI ID
        marker_accession = db.aliased(Accession)
        sub_antibody = db.aliased(Antibody)
        sq = db.session.query(sub_antibody) \
            .join(sub_antibody.markers) \
            .join(marker_accession, Marker.mgiid_object) \
            .filter(marker_accession.accid==marker_id) \
            .filter(sub_antibody._antibody_key==Antibody._antibody_key) \
            .correlate(Antibody)
        query = query.filter(sq.exists())

    antibodies = query.all()

    # load data needed on summary page
    batchLoadAttribute(antibodies, 'antigen')
    batchLoadAttribute(antibodies, 'antigen.source')
    batchLoadAttribute(antibodies, 'markers')
    batchLoadAttribute(antibodies, 'references')

    # sort antibodies in python, because we need the first marker symbol
    # and I'm not sure how to do that in SQLAlchemy
    _sort_antibodies(antibodies)

    return antibodies
def sort_clipboard(self, _user_key):
    """
    Sort all EMAPA clipboard items where _createdby_key == _user_key
    (stage first, then alphabetically on term) and persist the order.
    """
    items = self.clipboard_dao.get_clipboard_items(_user_key).items

    # pre-load the term data the sort relies on
    for attribute in ("emapa", "emapa_term"):
        batchLoadAttribute(items, attribute)

    self._sort_items_by_alpha(items)
    self.clipboard_dao.save_all(items)
def reportSummary():
    """
    Render the report query summary page.

    Filters Report records by optional request args:
      tags -- TAG_REGEX-separated labels, matched case-insensitively
      requested_by -- case-insensitive match on Report.requested_by
      report_author -- exact match on Report.report_author
    Results are ordered newest first.
    """
    query = Report.query

    # optional tag filter: split, trim, and lowercase each requested tag
    tagsToQuery = []
    if 'tags' in request.args:
        tags = TAG_REGEX.split(request.args['tags'])
        for tag in tags:
            tag = tag.strip()
            if tag:
                tag = tag.lower()
                tagsToQuery.append(tag)
        query = query.filter(
            Report.labels.any(ReportLabel.label.in_(tagsToQuery)))

    # optional case-insensitive requested_by filter
    requested_by = ''
    if 'requested_by' in request.args:
        requested_by = request.args['requested_by'].strip().lower()
        if requested_by:
            query = query.filter(
                db.func.lower(Report.requested_by) == requested_by)

    # optional exact report_author filter
    report_author = ''
    if 'report_author' in request.args:
        report_author = request.args['report_author'].strip()
        if report_author:
            query = query.filter(Report.report_author == report_author)

    # BUGFIX: a stray trailing line-continuation backslash after this
    # call glued it to the following statement (SyntaxError)
    query = query.order_by(Report.created.desc())

    reports = query.all()
    batchLoadAttribute(reports, 'labels')

    return render_template("report/report_query_summary.html",
                           reports=reports,
                           tags=tagsToQuery,
                           requested_by=requested_by,
                           report_author=report_author)
def renderProbeDetail(probe):
    """Render the probe detail page."""
    hasAssays = probe_hunter.doesProbeHaveAssays(probe._probe_key)

    # batch load reference-cache data the template displays
    ref_caches = probe._probe_reference_caches
    for attr_name in ('sequence_accids', 'probe_rflv',
                      'probe_aliases', 'refnotechunks'):
        batchLoadAttribute(ref_caches, attr_name)

    return render_template('detail/probe_detail.html',
                           probe=probe,
                           hasAssays=hasAssays)
def reportSummary():
    """
    Render the report query summary page.

    Filters Report records by optional request args:
      tags -- TAG_REGEX-separated labels, matched case-insensitively
      requested_by -- case-insensitive match on Report.requested_by
      report_author -- exact match on Report.report_author
    Results are ordered newest first.
    """
    query = Report.query

    # optional tag filter: split, trim, and lowercase each requested tag
    tagsToQuery = []
    if 'tags' in request.args:
        tags = TAG_REGEX.split(request.args['tags'])
        for tag in tags:
            tag = tag.strip()
            if tag:
                tag = tag.lower()
                tagsToQuery.append(tag)
        query = query.filter(
            Report.labels.any(ReportLabel.label.in_(tagsToQuery)))

    # optional case-insensitive requested_by filter
    requested_by = ''
    if 'requested_by' in request.args:
        requested_by = request.args['requested_by'].strip().lower()
        if requested_by:
            query = query.filter(
                db.func.lower(Report.requested_by) == requested_by)

    # optional exact report_author filter
    report_author = ''
    if 'report_author' in request.args:
        report_author = request.args['report_author'].strip()
        if report_author:
            query = query.filter(Report.report_author == report_author)

    # BUGFIX: a stray trailing line-continuation backslash after this
    # call glued it to the following statement (SyntaxError)
    query = query.order_by(Report.created.desc())

    reports = query.all()
    batchLoadAttribute(reports, 'labels')

    return render_template("report/report_query_summary.html",
                           reports=reports,
                           tags=tagsToQuery,
                           requested_by=requested_by,
                           report_author=report_author)
def renderProbeDetail(probe):
    """Render the probe detail page, including assay and child-probe info."""
    hasAssays = probe_hunter.doesProbeHaveAssays(probe._probe_key)
    childProbe = probe_hunter.getChildProbe(probe._probe_key)

    # batch load reference-cache data the template displays
    ref_caches = probe._probe_reference_caches
    for attr_name in ('sequence_accids', 'probe_rflv',
                      'probe_aliases', 'refnotechunks'):
        batchLoadAttribute(ref_caches, attr_name)

    return render_template('detail/probe_detail.html',
                           probe=probe,
                           hasAssays=hasAssays,
                           childProbe=childProbe)
def _prepExperiment(experiment):
    """Load any attributes an experiment detail page might need."""
    if not experiment:
        return
    batchLoadAttribute([experiment], 'marker_assocs')
    # per-association marker and allele objects
    for attribute in ('marker', 'allele'):
        batchLoadAttribute(experiment.marker_assocs, attribute)
def buildChildNodes(vocTerm, ignoreObsoletes=True):
    """
    returns list of child nodes for given parent vocTerm

    Each child is a dict {'id': <primary id>, 'label': <term>};
    children that themselves have child edges also get 'ex': True
    so the UI knows the node is expandable.
    """
    children = []

    # expand startNode one level down
    if vocTerm.dagnode:
        if vocTerm.dagnode.child_edges:
            # pre-load needed relationships
            batchLoadAttribute(vocTerm.dagnode.child_edges, "child_node")
            batchLoadAttribute(vocTerm.dagnode.child_edges, "child_node.vocterm")

            childNodes = [edge.child_node for edge in vocTerm.dagnode.child_edges]
            # pre-compute has_child_edges existence flags in one query
            batchLoadAttributeExists(childNodes, ["child_edges"])

            # alphabetical by term
            childNodes.sort(key=lambda x: x.vocterm.term)

            for childNode in childNodes:
                childTerm = childNode.vocterm

                # skip if obsolete
                if ignoreObsoletes and childTerm.isobsolete:
                    continue

                children.append({
                    'id': childTerm.primaryid,
                    'label': childTerm.term
                })

                # check for future children expansion
                if (childNode.has_child_edges):
                    # set child node as expandable
                    children[-1]['ex'] = True

    return children
def searchAlleles(refs_id=None, marker_id=None, assay_id=None, limit=None):
    """
    Perform search for Alleles

    refs_id -- reference J-number ID (matched via explicit references)
    marker_id -- marker MGI ID
    assay_id -- assay MGI ID
    limit -- optional maximum number of rows
    Ordered by transmission (desc), status, symbol.
    """
    query = Allele.query

    if assay_id:
        # allele associated with an assay carrying this MGI ID
        query = query.filter(Allele.assays.any(Assay.mgiid == assay_id))

    if marker_id:
        # join to the allele's marker and match its MGI ID
        query = query.join(Allele.marker)
        marker_accession = db.aliased(Accession)
        query = query.join(marker_accession, Marker.mgiid_object)
        query = query.filter(marker_accession.accid == marker_id)

    if refs_id:
        # EXISTS sub-query: allele explicitly referenced by this J-number
        jnum_accession = db.aliased(Accession)
        sub_allele = db.aliased(Allele)
        sq = db.session.query(sub_allele) \
            .join(sub_allele.explicit_references) \
            .join(jnum_accession, Reference.jnumid_object) \
            .filter(jnum_accession.accid==refs_id) \
            .filter(sub_allele._allele_key==Allele._allele_key) \
            .correlate(Allele)
        query = query.filter(sq.exists())

    query = query.order_by(Allele.transmission.desc(),
                           Allele.status,
                           Allele.symbol)

    if limit:
        query = query.limit(limit)

    alleles = query.all()

    # load attributes needed on summary
    batchLoadAttribute(alleles, "mp_annots")
    batchLoadAttribute(alleles, "disease_annots")
    batchLoadAttribute(alleles, "subtypes")
    batchLoadAttribute(alleles, "synonyms")

    return alleles
def searchSequences(marker_id=None, limit=None):
    """
    Perform search for Sequence records by marker_id
    ordered by Sequence._sequence_key
    """
    query = Sequence.query

    if marker_id:
        # EXISTS sub-query: sequence associated with this marker MGI ID
        marker_accession = db.aliased(Accession)
        sub_seq = db.aliased(Sequence)
        sq = db.session.query(sub_seq) \
            .join(sub_seq.markers) \
            .join(marker_accession, Marker.mgiid_object) \
            .filter(marker_accession.accid==marker_id) \
            .filter(sub_seq._sequence_key==Sequence._sequence_key) \
            .correlate(Sequence)
        query = query.filter(sq.exists())

    query = query.order_by(Sequence._sequence_key)

    if limit:
        query = query.limit(limit)

    sequences = query.all()

    # load any necessary data for summary
    batchLoadAttribute(sequences, 'markers')
    batchLoadAttribute(sequences, 'accession_objects')
    batchLoadAttribute(sequences, 'source')
    batchLoadAttribute(sequences, 'source.strain')

    return sequences
def searchSequences(marker_id=None, limit=None):
    """
    Search Sequence records by marker MGI ID,
    ordered by Sequence._sequence_key.
    """
    query = Sequence.query

    if marker_id:
        # EXISTS sub-query: sequence associated with a marker whose
        # MGI ID matches marker_id
        acc_alias = db.aliased(Accession)
        seq_alias = db.aliased(Sequence)
        exists_q = db.session.query(seq_alias) \
            .join(seq_alias.markers) \
            .join(acc_alias, Marker.mgiid_object) \
            .filter(acc_alias.accid == marker_id) \
            .filter(seq_alias._sequence_key == Sequence._sequence_key) \
            .correlate(Sequence)
        query = query.filter(exists_q.exists())

    query = query.order_by(Sequence._sequence_key)
    if limit:
        query = query.limit(limit)

    sequences = query.all()

    # pre-load relationships the summary page displays
    for attribute in ('markers', 'accession_objects',
                      'source', 'source.strain'):
        batchLoadAttribute(sequences, attribute)

    return sequences
def searchAssays(marker_id=None, allele_id=None, probe_id=None,
                 refs_id=None, antibody_id=None, limit=None):
    """
    Perform search for GXD Assay records by various parameters
        e.g. Marker nomen, Assay _refs_key
    ordered by Marker.symbol
    """
    query = Assay.query

    # join Marker + Reference for the order by clause
    query = query.join(Assay.marker)
    query = query.join(Assay.reference)

    if marker_id:
        # query Marker MGI ID
        query = query.filter(Marker.mgiid == marker_id)

    if allele_id:
        # query Allele MGI ID (EXISTS sub-query)
        allele_accession = db.aliased(Accession)
        sub_assay = db.aliased(Assay)
        sq = db.session.query(sub_assay) \
            .join(sub_assay.alleles) \
            .join(allele_accession, Allele.mgiid_object) \
            .filter(allele_accession.accid==allele_id) \
            .filter(sub_assay._assay_key==Assay._assay_key) \
            .correlate(Assay)
        query = query.filter(sq.exists())

    if probe_id:
        # query Probe MGI ID via the probe prep (EXISTS sub-query)
        probe_accession = db.aliased(Accession)
        sub_assay = db.aliased(Assay)
        sq = db.session.query(sub_assay) \
            .join(sub_assay.probeprep) \
            .join(ProbePrep.probe) \
            .join(probe_accession, Probe.mgiid_object) \
            .filter(probe_accession.accid==probe_id) \
            .filter(sub_assay._assay_key==Assay._assay_key) \
            .correlate(Assay)
        query = query.filter(sq.exists())

    if antibody_id:
        # query Antibody MGI ID via the antibody prep (EXISTS sub-query)
        antibody_accession = db.aliased(Accession)
        sub_assay = db.aliased(Assay)
        sq = db.session.query(sub_assay) \
            .join(sub_assay.antibodyprep) \
            .join(AntibodyPrep.antibody) \
            .join(antibody_accession, Antibody.mgiid_object) \
            .filter(antibody_accession.accid==antibody_id) \
            .filter(sub_assay._assay_key==Assay._assay_key) \
            .correlate(Assay)
        query = query.filter(sq.exists())

    if refs_id:
        query = query.filter(Reference.jnumid == refs_id)

    query = query.order_by(Marker.symbol,
                           Assay.assaytype_seq,
                           Reference.authors,
                           Assay.mgiid)

    if limit:
        query = query.limit(limit)

    assays = query.all()

    # batch load some related data needed on summary page
    batchLoadAttribute(assays, 'marker')
    batchLoadAttribute(assays, 'reference')

    return assays
def searchMarkers(nomen=None, refs_id=None, featuretypes=None, limit=None):
    """
    Perform search for Marker records by various parameters
        e.g. nomen, _refs_key
    ordered by Marker.markerstatus, Marker.symbol

    nomen -- SQL LIKE pattern matched case-insensitively against
        symbol, name, and synonyms
    refs_id -- reference J-number ID
    featuretypes -- feature type terms, matched directly or via ancestors
    limit -- optional maximum number of rows
    """
    # mouse markers only (_organism_key == 1)
    query = Marker.query.filter_by(_organism_key=1)

    if refs_id:
        # EXISTS sub-query: marker referenced by this J-number
        reference_accession = db.aliased(Accession)
        sub_marker = db.aliased(Marker)
        sq = db.session.query(sub_marker) \
            .join(sub_marker.all_references) \
            .join(reference_accession, Reference.jnumid_object) \
            .filter(reference_accession.accid==refs_id) \
            .filter(sub_marker._marker_key==Marker._marker_key) \
            .correlate(Marker)
        query = query.filter(sq.exists())

    if featuretypes:
        # EXISTS sub-query: marker's feature type matches one of the
        # requested terms, either directly or through an ancestor term
        ft_vocterm = db.aliased(VocTerm)
        sub_marker = db.aliased(Marker)
        sq = db.session.query(sub_marker) \
            .join(ft_vocterm, sub_marker.featuretype_vocterms) \
            .filter(
                db.or_(
                    ft_vocterm.term.in_(featuretypes),
                    ft_vocterm.ancestor_vocterms.any(VocTerm.term.in_(featuretypes))
                )
            ) \
            .filter(sub_marker._marker_key==Marker._marker_key) \
            .correlate(Marker)
        query = query.filter(sq.exists())

    if nomen:
        nomen = nomen.lower()
        # query Marker symbol, name, synonyms
        # query = query.filter(
        #     db.or_(db.func.lower(Marker.symbol).like(nomen),
        #         db.func.lower(Marker.name).like(nomen),
        #         Marker.synonyms.any(db.func.lower(Synonym.synonym).like(nomen))
        #     )
        # )
        # NOTE(review): a union of three queries is used instead of the
        # single OR above -- presumably for performance; confirm before
        # consolidating
        query1 = query.filter(db.func.lower(Marker.symbol).like(nomen))
        query2 = query.filter(db.func.lower(Marker.name).like(nomen))
        query3 = query.filter(
            Marker.synonyms.any(db.func.lower(Synonym.synonym).like(nomen)))
        query = query1.union(query2).union(query3)

    query = query.order_by(Marker.markerstatus, Marker.symbol)

    if limit:
        query = query.limit(limit)

    markers = query.all()

    # batch load some related data needed on summary page
    batchLoadAttribute(markers, 'synonyms')
    #batchLoadAttribute(markers, 'secondary_mgiids')
    batchLoadAttribute(markers, 'featuretype_vocterms')

    return markers
def searchProbes(marker_id=None, refs_id=None, probe_name=None,
                 segmenttypes=None, limit=None):
    """
    Perform search for Probe records by various parameters
        e.g. marker_id, _refs_id
    ordered by Probe.name

    probe_name -- SQL LIKE pattern matched case-insensitively
        against probe name and probe aliases
    """
    query = Probe.query

    if segmenttypes:
        # restrict to probes carrying one of the requested segment types
        segtypeAlias = db.aliased(VocTerm)
        query = query.join(segtypeAlias, Probe.segmenttype_obj) \
            .filter(segtypeAlias.term.in_(segmenttypes))

    if refs_id:
        # EXISTS sub-query: probe referenced by this J-number
        ref_accession = db.aliased(Accession)
        sub_probe = db.aliased(Probe)
        sq = db.session.query(sub_probe) \
            .join(sub_probe.references) \
            .join(ref_accession, Reference.jnumid_object) \
            .filter(ref_accession.accid==refs_id) \
            .filter(sub_probe._probe_key==Probe._probe_key) \
            .correlate(Probe)
        query = query.filter(sq.exists())

    if marker_id:
        # EXISTS sub-query: probe associated with this marker MGI ID
        marker_accession = db.aliased(Accession)
        sub_probe = db.aliased(Probe)
        sq = db.session.query(sub_probe) \
            .join(sub_probe.markers) \
            .join(marker_accession, Marker.mgiid_object) \
            .filter(marker_accession.accid==marker_id) \
            .filter(sub_probe._probe_key==Probe._probe_key) \
            .correlate(Probe)
        query = query.filter(sq.exists())

    if probe_name:
        probe_name = probe_name.lower()
        # EXISTS sub-query matching probe aliases via the reference cache
        probeAlias = db.aliased(ProbeAlias)
        probeRef = db.aliased(ProbeReferenceCache)
        sub_probe = db.aliased(Probe)
        alias_sq = db.session.query(sub_probe) \
            .join(probeRef, sub_probe._probe_reference_caches) \
            .join(probeAlias, probeRef.probe_aliases) \
            .filter(db.func.lower(probeAlias.alias).like(probe_name)) \
            .filter(sub_probe._probe_key==Probe._probe_key) \
            .correlate(Probe)
        # union of name match and alias match
        query1 = query.filter(db.func.lower(Probe.name).like(probe_name))
        query2 = query.filter(alias_sq.exists())
        query = query1.union(query2)

    query = query.order_by(Probe.name)

    if limit:
        query = query.limit(limit)

    probes = query.all()

    # batch load some related data needed on summary page
    batchLoadAttribute(probes, 'source')
    batchLoadAttribute(probes, 'markers')
    batchLoadAttribute(probes, 'references')
    batchLoadAttribute(probes, '_probe_marker_caches')
    batchLoadAttribute(probes, '_probe_reference_caches')
    batchLoadAttribute(probes, '_probe_reference_caches.probe_aliases')
    batchLoadAttribute(probes, 'derivedfrom_probe')

    # probe_assocs = []
    # for probe in probes:
    #     probe_assocs.extend(probe.probe_marker_caches)
    # batchLoadAttribute(probe_assocs, 'marker')

    return probes
def renderImagepaneSummary(form):
    """Render the image pane summary page for the given search form."""
    # gather lists of image pages via hunter
    images = form.searchImages()

    # batch load relationships with more efficient SQL
    batchLoadAttribute(images, 'imagepanes')
    batchLoadAttribute(images, 'imagepanes.insituresults')
    batchLoadAttribute(images, 'imagepanes.insituresults.specimen')
    batchLoadAttribute(images, 'imagepanes.insituresults.specimen.assay')
    batchLoadAttribute(images, 'imagepanes.insituresults.specimen.assay.marker')
    batchLoadAttribute(images, 'imagepanes.gel_assays')
    batchLoadAttribute(images, 'imagepanes.gel_assays.marker')

    # calculate distinct specimen labels for each image pane / assay combo
    def distinctSpecimenLabels(imagepane, assay):
        """
        Return sorted distinct list of specimen labels for this assay
        """
        specimenLabels = set([])
        for result in imagepane.insituresults:
            if result.specimen.assay.mgiid == assay.mgiid:
                specimenLabels.add(result.specimen.specimenlabel)

        specimenLabels = list(specimenLabels)
        # natural / smart-alpha ordering of labels
        specimenLabels.sort(key=smartAlphaFormat)
        return specimenLabels

    return render_template("summary/imagePane/imagepane_summary.html",
                           form=form,
                           images=images,
                           formArgs=form.argString(),
                           distinctSpecimenLabels=distinctSpecimenLabels)
def searchResults(marker_id=None, refs_id=None, direct_structure_id=None,
                  direct_celltype_id=None, page_size=1, page_num=1):
    """
    Search GXD results and return one paginated page of them.

    Query construction is delegated to _buildResultQuery; the page is
    fetched with error_out disabled, then display relationships are
    batch loaded.
    """
    query = _buildResultQuery(marker_id, refs_id,
                              direct_structure_id, direct_celltype_id)
    page = query.paginate(page_num, page_size, False)

    # pre-load relationships used when rendering results
    for attribute in ('marker', 'structure', 'celltype',
                      'reference', 'assay', 'genotype', 'specimen'):
        batchLoadAttribute(page.items, attribute)

    return page
def search(self, search_query):
    """Search GXD index records using a SearchQuery."""
    search_results = self.gxdindex_dao.search(search_query)

    # pre-load relationships used when rendering the results
    for attribute in ('marker', 'structure', 'reference',
                      'assay', 'genotype', 'specimen'):
        batchLoadAttribute(search_results.items, attribute)

    return search_results
def searchMarkers(nomen=None, refs_id=None, featuretypes=None, limit=None):
    """
    Perform search for Marker records by various parameters
        e.g. nomen, _refs_key
    ordered by Marker.markerstatus, Marker.symbol

    nomen -- SQL LIKE pattern matched case-insensitively against
        symbol, name, and synonyms
    refs_id -- reference J-number ID
    featuretypes -- feature type terms, matched directly or via ancestors
    limit -- optional maximum number of rows
    """
    # mouse markers only (_organism_key == 1)
    query = Marker.query.filter_by(_organism_key=1)

    if refs_id:
        # EXISTS sub-query: marker referenced by this J-number
        reference_accession = db.aliased(Accession)
        sub_marker = db.aliased(Marker)
        sq = db.session.query(sub_marker) \
            .join(sub_marker.all_references) \
            .join(reference_accession, Reference.jnumid_object) \
            .filter(reference_accession.accid==refs_id) \
            .filter(sub_marker._marker_key==Marker._marker_key) \
            .correlate(Marker)
        query = query.filter(sq.exists())

    if featuretypes:
        # EXISTS sub-query: marker's feature type matches one of the
        # requested terms, either directly or through an ancestor term
        ft_vocterm = db.aliased(VocTerm)
        sub_marker = db.aliased(Marker)
        sq = db.session.query(sub_marker) \
            .join(ft_vocterm, sub_marker.featuretype_vocterms) \
            .filter(
                db.or_(
                    ft_vocterm.term.in_(featuretypes),
                    ft_vocterm.ancestor_vocterms.any(VocTerm.term.in_(featuretypes))
                )
            ) \
            .filter(sub_marker._marker_key==Marker._marker_key) \
            .correlate(Marker)
        query = query.filter(sq.exists())

    if nomen:
        nomen = nomen.lower()
        # query Marker symbol, name, synonyms
        # query = query.filter(
        #     db.or_(db.func.lower(Marker.symbol).like(nomen),
        #         db.func.lower(Marker.name).like(nomen),
        #         Marker.synonyms.any(db.func.lower(Synonym.synonym).like(nomen))
        #     )
        # )
        # NOTE(review): a union of three queries is used instead of the
        # single OR above -- presumably for performance; confirm before
        # consolidating
        query1 = query.filter(db.func.lower(Marker.symbol).like(nomen))
        query2 = query.filter(db.func.lower(Marker.name).like(nomen))
        query3 = query.filter(
            Marker.synonyms.any(db.func.lower(Synonym.synonym).like(nomen)))
        query = query1.union(query2).union(query3)

    query = query.order_by(Marker.markerstatus, Marker.symbol)

    if limit:
        query = query.limit(limit)

    markers = query.all()

    # batch load some related data needed on summary page
    batchLoadAttribute(markers, 'synonyms')
    #batchLoadAttribute(markers, 'secondary_mgiids')
    batchLoadAttribute(markers, 'featuretype_vocterms')

    return markers
def searchAssays(marker_id=None, allele_id=None, probe_id=None,
                 refs_id=None, antibody_id=None, limit=None):
    """
    Perform search for GXD Assay records by various parameters
        e.g. Marker nomen, Assay _refs_key
    ordered by Marker.symbol
    """
    query = Assay.query

    # join Marker + Reference for the order by clause
    query = query.join(Assay.marker)
    query = query.join(Assay.reference)

    if marker_id:
        # query Marker MGI ID
        query = query.filter(Marker.mgiid==marker_id)

    if allele_id:
        # query Allele MGI ID (EXISTS sub-query)
        allele_accession = db.aliased(Accession)
        sub_assay = db.aliased(Assay)
        sq = db.session.query(sub_assay) \
            .join(sub_assay.alleles) \
            .join(allele_accession, Allele.mgiid_object) \
            .filter(allele_accession.accid==allele_id) \
            .filter(sub_assay._assay_key==Assay._assay_key) \
            .correlate(Assay)
        query = query.filter(sq.exists())

    if probe_id:
        # query Probe MGI ID via the probe prep (EXISTS sub-query)
        probe_accession = db.aliased(Accession)
        sub_assay = db.aliased(Assay)
        sq = db.session.query(sub_assay) \
            .join(sub_assay.probeprep) \
            .join(ProbePrep.probe) \
            .join(probe_accession, Probe.mgiid_object) \
            .filter(probe_accession.accid==probe_id) \
            .filter(sub_assay._assay_key==Assay._assay_key) \
            .correlate(Assay)
        query = query.filter(sq.exists())

    if antibody_id:
        # query Antibody MGI ID via the antibody prep (EXISTS sub-query)
        antibody_accession = db.aliased(Accession)
        sub_assay = db.aliased(Assay)
        sq = db.session.query(sub_assay) \
            .join(sub_assay.antibodyprep) \
            .join(AntibodyPrep.antibody) \
            .join(antibody_accession, Antibody.mgiid_object) \
            .filter(antibody_accession.accid==antibody_id) \
            .filter(sub_assay._assay_key==Assay._assay_key) \
            .correlate(Assay)
        query = query.filter(sq.exists())

    if refs_id:
        query = query.filter(Reference.jnumid==refs_id)

    query = query.order_by(Marker.symbol,
                           Assay.assaytype_seq,
                           Reference.authors,
                           Assay.mgiid)

    if limit:
        query = query.limit(limit)

    assays = query.all()

    # batch load some related data needed on summary page
    batchLoadAttribute(assays, 'marker')
    batchLoadAttribute(assays, 'reference')

    return assays
def buildDagTrees(dagnodes, batchloadOn=True):
    """
    Builds a list of DAG tree views,
    one for every path of every dag node passed in.

    Each tree is a dict {"name": <dag name>, "root": <TreeNode>}.
    Whenever a node has multiple parents the current tree is cloned
    (via splitTree) so that each parent path yields its own tree.
    """
    dagtrees = []
    for dagnode in dagnodes:
        root = TreeNode(dagnode)
        dagtree = {"name": dagnode.dag_name, "root": root}
        dagtrees.append(dagtree)

        # track all the found dag nodes for batch loading
        # vocterms later on
        foundNodes = set()

        # get immediate children
        # batch load nodes of each fetched edge
        loadFirstChildren(root, foundNodes, batchloadOn)

        # recurse parents
        stack = [dagtree]
        while stack:
            dtree = stack.pop()
            tree_node = dtree['root']

            # if node is first parent
            if len(tree_node.children) == 1 and \
                    dagnode._object_key in [t._term_key for t in tree_node.children]:
                original = tree_node.children[0]
                # load siblings for the original node
                loadSiblings(original, tree_node, foundNodes, batchloadOn)

            if tree_node.dagnode.parent_edges:
                # batch load nodes of each fetched edge
                if batchloadOn:
                    batchLoadAttribute(tree_node.dagnode.parent_edges, "parent_node")

                if len(tree_node.dagnode.parent_edges) > 1:
                    # create new trees anytime there is more than 1 parent
                    for edge in tree_node.dagnode.parent_edges[1:]:
                        # take current tree and clone a new one
                        # reset the root of new tree as the current parent edge
                        new_tree = splitTree(dagtree, edge, foundNodes, batchloadOn)
                        stack.append(new_tree)
                        dagtrees.append(new_tree)

                parent_edge = tree_node.dagnode.parent_edges[0]
                parent_node = parent_edge.parent_node
                ptree_node = TreeNode(parent_node)

                # move root down a notch
                tree_node.parent = ptree_node
                tree_node.edge_label = parent_edge.label
                ptree_node.children.append(tree_node)
                dtree['root'] = ptree_node
                stack.append(dtree)

        # batch load all the term objects for every found node
        if batchloadOn:
            batchLoadAttribute(list(foundNodes), "vocterm")
            #batchLoadAttributeExists(list(foundNodes), ["children"])

    # sort all term children
    for tree in dagtrees:
        tree['root'].sort()

    return dagtrees
def searchIndexRecords(marker_id=None, refs_id=None, age=None,
                       assay_type=None, limit=None):
    """
    Search GxdIndexRecord records by marker MGI ID, reference
    J-number, index stage age, and/or assay type,
    ordered by Marker.symbol, GxdIndexRecord._index_key.
    """
    # results to be returned
    results = []

    query = GxdIndexRecord.query
    # join marker for the order by clause
    query = query.join(GxdIndexRecord.marker)
    #query = query.join(GxdIndexRecord.reference)

    if marker_id:
        # EXISTS sub-query: record's marker has this MGI ID
        marker_accession = db.aliased(Accession)
        sub_result = db.aliased(GxdIndexRecord)
        sq = db.session.query(sub_result) \
            .join(sub_result.marker) \
            .join(marker_accession, Marker.mgiid_object) \
            .filter(marker_accession.accid==marker_id) \
            .filter(sub_result._index_key==GxdIndexRecord._index_key) \
            .correlate(GxdIndexRecord)
        query = query.filter(sq.exists())

    if refs_id:
        # EXISTS sub-query: record's reference has this J-number
        reference_accession = db.aliased(Accession)
        sub_result = db.aliased(GxdIndexRecord)
        sq = db.session.query(sub_result) \
            .join(sub_result.reference) \
            .join(reference_accession, Reference.jnumid_object) \
            .filter(reference_accession.accid==refs_id) \
            .filter(sub_result._index_key==GxdIndexRecord._index_key) \
            .correlate(GxdIndexRecord)
        query = query.filter(sq.exists())

    if age:
        # any index stage recorded with this stage id
        query = query.filter(
            GxdIndexRecord.indexstages.any(GxdIndexStage.stageid==age))

    if assay_type:
        # any index stage recorded with this assay type
        query = query.filter(
            GxdIndexRecord.indexstages.any(GxdIndexStage.indexassay==assay_type))

    query = query.order_by(Marker.symbol, GxdIndexRecord._index_key)

    if limit:
        query = query.limit(limit)

    results = query.all()

    # pre-load data displayed on the summary
    batchLoadAttribute(results, 'marker')
    batchLoadAttribute(results, 'reference')
    batchLoadAttribute(results, 'indexstages')

    return results
def renderImagepaneSummary(form):
    """
    Render the image pane summary page for the given search form.
    """
    # gather lists of image panes via hunter
    images = form.searchImages()

    # batch load relationships with more efficient SQL
    relatedAttributes = [
        'imagepanes',
        'imagepanes.insituresults',
        'imagepanes.insituresults.specimen',
        'imagepanes.insituresults.specimen.assay',
        'imagepanes.insituresults.specimen.assay.marker',
        'imagepanes.gel_assays',
        'imagepanes.gel_assays.marker',
    ]
    for attribute in relatedAttributes:
        batchLoadAttribute(images, attribute)

    def distinctSpecimenLabels(imagepane, assay):
        """
        Return sorted distinct list of specimen labels for this assay
        """
        labels = set()
        for insituResult in imagepane.insituresults:
            if insituResult.specimen.assay.mgiid == assay.mgiid:
                labels.add(insituResult.specimen.specimenlabel)
        labels = list(labels)
        # NOTE: positional comparator (Python 2 style sort)
        labels.sort(sort.smartAlphaCompare)
        return labels

    return render_template("summary/imagePane/imagepane_summary.html",
                           form=form,
                           images=images,
                           formArgs=form.argString(),
                           distinctSpecimenLabels=distinctSpecimenLabels)
def searchResults(marker_id=None, refs_id=None, direct_structure_id=None, page_size=1, page_num=1):
    """
    Paginated search for expression results by marker_id, refs_id
    and/or direct_structure_id; returns the pagination object with
    related data pre-loaded on its items.
    """
    resultQuery = _buildResultQuery(marker_id, refs_id, direct_structure_id)
    pagedResults = resultQuery.paginate(page_num, page_size, False)

    # batch load related data needed for display
    for attribute in ('marker', 'structure', 'reference',
                      'assay', 'genotype', 'specimen'):
        batchLoadAttribute(pagedResults.items, attribute)

    return pagedResults
def searchProbes(marker_id=None, refs_id=None, probe_name=None, segmenttypes=None, limit=None):
    """
    Perform search for Probe records by various parameters
        e.g. marker_id, refs_id, probe_name, segmenttypes
        ordered by Probe.name
    """
    query = Probe.query

    if segmenttypes:
        # restrict to the requested segment type terms
        segtypeAlias = db.aliased(VocTerm)
        query = query.join(segtypeAlias, Probe.segmenttype_obj) \
            .filter(segtypeAlias.term.in_(segmenttypes))

    if refs_id:
        # correlated EXISTS against the reference J-number
        refAccession = db.aliased(Accession)
        refProbe = db.aliased(Probe)
        refsExists = db.session.query(refProbe) \
            .join(refProbe.references) \
            .join(refAccession, Reference.jnumid_object) \
            .filter(refAccession.accid == refs_id) \
            .filter(refProbe._probe_key == Probe._probe_key) \
            .correlate(Probe) \
            .exists()
        query = query.filter(refsExists)

    if marker_id:
        # correlated EXISTS against the marker MGI ID
        markerAccession = db.aliased(Accession)
        markerProbe = db.aliased(Probe)
        markerExists = db.session.query(markerProbe) \
            .join(markerProbe.markers) \
            .join(markerAccession, Marker.mgiid_object) \
            .filter(markerAccession.accid == marker_id) \
            .filter(markerProbe._probe_key == Probe._probe_key) \
            .correlate(Probe) \
            .exists()
        query = query.filter(markerExists)

    if probe_name:
        # match either the probe name or one of its aliases (case-insensitive)
        probe_name = probe_name.lower()
        aliasEntity = db.aliased(ProbeAlias)
        probeRefCache = db.aliased(ProbeReferenceCache)
        nameProbe = db.aliased(Probe)
        aliasExists = db.session.query(nameProbe) \
            .join(probeRefCache, nameProbe._probe_reference_caches) \
            .join(aliasEntity, probeRefCache.probe_aliases) \
            .filter(db.func.lower(aliasEntity.alias).like(probe_name)) \
            .filter(nameProbe._probe_key == Probe._probe_key) \
            .correlate(Probe) \
            .exists()
        nameQuery = query.filter(db.func.lower(Probe.name).like(probe_name))
        aliasQuery = query.filter(aliasExists)
        query = nameQuery.union(aliasQuery)

    query = query.order_by(Probe.name)

    if limit:
        query = query.limit(limit)

    probes = query.all()

    # batch load some related data needed on summary page
    for attribute in ('source',
                      'markers',
                      'references',
                      '_probe_marker_caches',
                      '_probe_reference_caches',
                      '_probe_reference_caches.probe_aliases',
                      'derivedfrom_probe'):
        batchLoadAttribute(probes, attribute)

    return probes
def searchIndexRecords(marker_id=None, refs_id=None, age=None, assay_type=None, limit=None):
    """
    Search GXD index records (GxdIndexRecord), filtered by any of
    marker_id, refs_id, age and assay_type; ordered by marker symbol
    then index key, optionally limited.
    """
    query = GxdIndexRecord.query.join(GxdIndexRecord.marker)
    #query = query.join(GxdIndexRecord.reference)

    if marker_id:
        # records whose marker carries this MGI ID (correlated EXISTS)
        accessionAlias = db.aliased(Accession)
        recordAlias = db.aliased(GxdIndexRecord)
        markerSubquery = (db.session.query(recordAlias)
            .join(recordAlias.marker)
            .join(accessionAlias, Marker.mgiid_object)
            .filter(accessionAlias.accid == marker_id)
            .filter(recordAlias._index_key == GxdIndexRecord._index_key)
            .correlate(GxdIndexRecord))
        query = query.filter(markerSubquery.exists())

    if refs_id:
        # records whose reference carries this J-number (correlated EXISTS)
        accessionAlias = db.aliased(Accession)
        recordAlias = db.aliased(GxdIndexRecord)
        refsSubquery = (db.session.query(recordAlias)
            .join(recordAlias.reference)
            .join(accessionAlias, Reference.jnumid_object)
            .filter(accessionAlias.accid == refs_id)
            .filter(recordAlias._index_key == GxdIndexRecord._index_key)
            .correlate(GxdIndexRecord))
        query = query.filter(refsSubquery.exists())

    if age:
        query = query.filter(
            GxdIndexRecord.indexstages.any(GxdIndexStage.stageid == age))

    if assay_type:
        query = query.filter(
            GxdIndexRecord.indexstages.any(GxdIndexStage.indexassay == assay_type))

    query = query.order_by(Marker.symbol, GxdIndexRecord._index_key)

    if limit:
        query = query.limit(limit)

    records = query.all()

    # pre-load data needed for the summary display
    for attribute in ('marker', 'reference', 'indexstages'):
        batchLoadAttribute(records, attribute)

    return records
def buildDagTrees(dagnodes, batchloadOn=True):
    """
    Builds a list of DAG tree views.
        one for every path of every dag node passed in

    dagnodes - DAG node objects to build trees for
    batchloadOn - when True, related rows (parent edges, vocterms) are
        batch-loaded to reduce the number of queries

    Returns a list of {"name": <dag name>, "root": TreeNode} dicts.
    Extra tree dicts are appended whenever a node has more than one
    parent edge (one tree per distinct path toward a root).
    """
    dagtrees = []
    for dagnode in dagnodes:
        root = TreeNode(dagnode)
        dagtree = {"name": dagnode.dag_name, "root": root}
        dagtrees.append(dagtree)

        # track all the found dag nodes for batch loading
        # vocterms later on
        foundNodes = set()

        # get immediate children
        # batch load nodes of each fetched edge
        loadFirstChildren(root, foundNodes, batchloadOn)

        # recurse parents: repeatedly replace each tree's root with its
        # first parent, splitting off a cloned tree for every extra parent
        stack = [dagtree]
        while stack:
            dtree = stack.pop()
            tree_node = dtree['root']

            # if node is first parent (the searched node is still the
            # root's only child), also pull in its siblings
            if len(tree_node.children) == 1 and \
                    dagnode._object_key in [t._term_key for t in tree_node.children]:
                original = tree_node.children[0]
                # load siblings for the original node
                loadSiblings(original, tree_node, foundNodes, batchloadOn)

            if tree_node.dagnode.parent_edges:
                # batch load nodes of each fetched edge
                if batchloadOn:
                    batchLoadAttribute(tree_node.dagnode.parent_edges, "parent_node")

                if len(tree_node.dagnode.parent_edges) > 1:
                    # create new trees anytime there is more than 1 parent
                    for edge in tree_node.dagnode.parent_edges[1:]:
                        # take current tree and clone a new one
                        # reset the root of new tree as the current parent edge
                        new_tree = splitTree(dagtree, edge, foundNodes, batchloadOn)
                        stack.append(new_tree)
                        dagtrees.append(new_tree)

                parent_edge = tree_node.dagnode.parent_edges[0]
                parent_node = parent_edge.parent_node
                ptree_node = TreeNode(parent_node)
                # move root down a notch
                tree_node.parent = ptree_node
                tree_node.edge_label = parent_edge.label
                ptree_node.children.append(tree_node)
                dtree['root'] = ptree_node
                stack.append(dtree)

        # batch load all the term objects for every found node
        # NOTE(review): foundNodes is reset per dagnode, so this load is
        # placed inside the dagnode loop — confirm against original layout
        if batchloadOn:
            batchLoadAttribute(list(foundNodes), "vocterm")
            #batchLoadAttributeExists(list(foundNodes), ["children"])

    # sort all term children
    for tree in dagtrees:
        tree['root'].sort()

    return dagtrees
def _renderGenotypeDetail(genotypes, templateName):
    """
    Generic genotype MP/Disease summary.

    Pre-fetches the annotation, evidence, and note records for the
    given genotypes in batched queries, loads the phenotype header
    data, then renders templateName with the genotypes.
    """
    # pre-fetch all the evidence and note records
    batchLoadAttribute(genotypes, 'primaryimagepane')
    batchLoadAttribute(genotypes, 'mp_annots')
    batchLoadAttribute(genotypes, 'mp_annots.evidences')
    batchLoadAttribute(genotypes, 'mp_annots.evidences.notes')
    batchLoadAttribute(genotypes, 'mp_annots.evidences.properties')
    # NOTE(review): a duplicate load of 'mp_annots.evidences.notes' was
    # removed here; a sibling variant loads 'mp_annots.evidences.notes.chunks'
    # at this position — confirm whether that was the original intent
    batchLoadAttribute(genotypes, 'disease_annots')
    batchLoadAttribute(genotypes, 'disease_annots.evidences')
    batchLoadAttribute(genotypes, 'disease_annots.term_object')

    # load the phenotype specific information and organize it
    # into mp_headers objects
    genotype_mp_hunter.loadPhenotypeData(genotypes)

    return render_template(templateName, genotypes=genotypes)
def _renderGenotypeDetail(genotypes, templateName):
    """
    Generic genotype MP/Disease summary
    """
    # pre-fetch all the evidence and note records in batched queries
    prefetchAttributes = [
        'primaryimagepane',
        'mp_annots',
        'mp_annots.evidences',
        'mp_annots.evidences.notes',
        'mp_annots.evidences.properties',
        'mp_annots.evidences.notes.chunks',
        'disease_annots',
        'disease_annots.evidences',
        'disease_annots.term_object',
        'disease_annots_DO',
        'disease_annots_DO.evidences',
        'disease_annots_DO.term_object',
    ]
    for attribute in prefetchAttributes:
        batchLoadAttribute(genotypes, attribute)

    # load the phenotype specific information and organize it
    # into mp_headers objects
    genotype_mp_hunter.loadPhenotypeData(genotypes)

    return render_template(templateName, genotypes=genotypes)
def reportIndex():
    """
    Render the report index page, reports ordered by name.
    """
    allReports = Report.query.order_by(Report.name).all()
    batchLoadAttribute(allReports, 'labels')
    return render_template("report/index.html", reports=allReports)