def _run_query(self, sqa_query):
    """
    Execute the given SQLAlchemy query and wrap the rows in a
    SearchResults container (items + total_count).
    """
    fetched = sqa_query.all()
    results = SearchResults()
    results.items = fetched
    # total_count mirrors the full fetched list; no pagination here
    results.total_count = len(fetched)
    return results
def get_clipboard_items(self, _user_key):
    """
    Returns all EMAPA clipboard SetMembers for the given user
    """
    # restrict to the EMAPA clipboard set, scoped to the creating user,
    # in clipboard order
    clipboard_query = (
        SetMember.query
        .filter_by(_set_key=self.emapa_clipboard_set_key)
        .filter_by(_createdby_key=_user_key)
        .order_by(SetMember.sequencenum)
    )
    members = clipboard_query.all()
    results = SearchResults()
    results.items = members
    results.total_count = len(members)
    return results
def download_raw_samples(self, experiment_primaryid):
    """
    Download the raw sample records for an ArrayExpress experiment,
    consolidate them via consolidate_samples(), and return them
    wrapped in a SearchResults.

    Raises Exception when the HTTP response status is not 200.
    """
    url = 'http://www.ebi.ac.uk/arrayexpress/json/v3/experiments/%s/samples' % experiment_primaryid
    reader = UrlReader()
    logger.debug("Preparing to read: %s" % url)
    response = reader.get(url)
    logger.debug("Read URL, status code (%s), content length(%d bytes)" % (response.statusCode, len(response.content)))
    if response.statusCode != 200:
        raise Exception("Bad status code (%d) received when retrieving samples" % response.statusCode)
    # response body is JSON shaped as {"experiment": {"sample": [...]}}
    parsed = JsonHelper().fromJson(response.content)
    raw_samples = parsed['experiment']['sample']
    results = SearchResults()
    results.items = self.consolidate_samples(raw_samples)
    results.total_count = len(results.items)
    return results
def _run_paginated_query(self, sqa_query, paginator):
    """
    run SQLAlchemy query.paginate(), return SearchResults
    set paginator on SearchResults
    """
    results = SearchResults()
    logger.debug("running paginated query, page_num=%s, page_size=%s" % (paginator.page_num, paginator.page_size))
    # error_out=False: out-of-range pages yield an empty page, not a 404
    page = sqa_query.paginate(paginator.page_num, paginator.page_size, False)
    results.items = page.items
    # total is the full row count across all pages, not just this page
    results.total_count = page.total
    results.paginator = paginator
    return results