def test_entries_stats(self):
    """The stats block exposed by the browser mirrors the Solr response."""
    # Wire up the mocked dataset entry source and the mocked Solr connection.
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    self.conn.raw_query.return_value = make_response([1, 2, 3])

    browser = Browser()
    browser.execute()

    stats = browser.get_stats()
    h.assert_equal(stats['results_count'], 3)
    h.assert_equal(stats['results_count_query'], 1234)
def test_entries_stats(self):
    """The stats block exposed by the browser mirrors the Solr response."""
    # Wire up the mocked dataset entry source and the mocked Solr connection.
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    self.conn.raw_query.return_value = make_response([1, 2, 3])

    browser = Browser()
    browser.execute()

    stats = browser.get_stats()
    assert stats["results_count"] == 3
    assert stats["results_count_query"] == 1234
def search(self):
    """Search entries across one or more datasets.

    Parses the request's search parameters, resolves which datasets may be
    queried for the current account, and then either:

    * streams the full result set as CSV or JSON when the requested page
      size exceeds the configured default, or
    * runs a Solr query and returns entries, facets and stats as JSONP
      (or a CSV download when ``format=csv``).

    Returns a dict of errors (with a 400 status where applicable) on
    parse failures, missing datasets, or Solr errors.
    """
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()

    if errors:
        response.status = 400
        return to_jsonp({'errors': errors})

    expand_facets = params.pop('expand_facet_dimensions')
    # NOTE: renamed from `format` to avoid shadowing the builtin.
    output_format = params.pop('format')

    if output_format == 'csv':
        # CSV output is a flat row dump: stats and facets have no place in it.
        params['stats'] = False
        params['facet_field'] = None

    datasets = params.pop('dataset', None)
    # `not datasets` covers both None and an empty collection.
    if not datasets:
        # No explicit dataset filter: search everything visible to the account.
        q = Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        # Facet expansion is only meaningful for an explicit single dataset.
        expand_facets = False

    if not datasets:
        return {'errors': ["No dataset available."]}

    # Restrict the query to datasets the account may actually read.
    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)

    response.last_modified = max(d.updated_at for d in datasets)
    etag_cache_keygen(parser.key(), response.last_modified)

    if params['pagesize'] > parser.defaults['pagesize']:
        # Oversized pages are streamed instead of queried via Solr.
        # http://wiki.nginx.org/X-accel#X-Accel-Buffering
        response.headers['X-Accel-Buffering'] = 'no'

        if output_format == 'csv':
            csv_headers(response, 'entries.csv')
            streamer = CSVStreamingResponse(
                datasets,
                params,
                pagesize=parser.defaults['pagesize']
            )
            return streamer.response()
        else:
            json_headers(filename='entries.json')
            streamer = JSONStreamingResponse(
                datasets,
                params,
                pagesize=parser.defaults['pagesize'],
                expand_facets=util.expand_facets if expand_facets else None,
                callback=request.params.get('callback')
            )
            return streamer.response()

    solr_browser = Browser(**params)
    try:
        solr_browser.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}

    entries = []
    for dataset, entry in solr_browser.get_entries():
        entry = entry_apply_links(dataset.name, entry)
        entry['dataset'] = dataset_apply_links(dataset.as_dict())
        entries.append(entry)

    if output_format == 'csv':
        return write_csv(entries, response, filename='entries.csv')

    if expand_facets and len(datasets) == 1:
        facets = solr_browser.get_expanded_facets(datasets[0])
    else:
        facets = solr_browser.get_facets()

    return to_jsonp({
        'stats': solr_browser.get_stats(),
        'facets': facets,
        'results': entries
    })
def search(self):
    """Search entries across one or more datasets.

    Parses the request's search parameters, resolves which datasets may
    be queried for the current account, and then either streams the full
    result set (when the requested page size exceeds the default) or runs
    a Solr query and returns entries, facets and stats as JSONP / CSV.
    """
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return to_jsonp({'errors': errors})
    expand_facets = params.pop('expand_facet_dimensions')
    format = params.pop('format')
    if format == 'csv':
        # CSV output is a flat row dump: stats and facets have no place in it.
        params['stats'] = False
        params['facet_field'] = None
    datasets = params.pop('dataset', None)
    if datasets is None or not datasets:
        # No explicit dataset filter: search everything visible to the account.
        q = Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        # Facet expansion is only meaningful for an explicit single dataset.
        expand_facets = False
    if not datasets:
        return {'errors': ["No dataset available."]}
    # Restrict the query to datasets the account may actually read.
    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)
    response.last_modified = max([d.updated_at for d in datasets])
    etag_cache_keygen(parser.key(), response.last_modified)
    if params['pagesize'] > parser.defaults['pagesize']:
        # Oversized pages are streamed instead of queried via Solr.
        # http://wiki.nginx.org/X-accel#X-Accel-Buffering
        response.headers['X-Accel-Buffering'] = 'no'
        if format == 'csv':
            csv_headers(response, 'entries.csv')
            streamer = CSVStreamingResponse(
                datasets,
                params,
                pagesize=parser.defaults['pagesize'])
            return streamer.response()
        else:
            json_headers(filename='entries.json')
            streamer = JSONStreamingResponse(
                datasets,
                params,
                pagesize=parser.defaults['pagesize'],
                expand_facets=util.expand_facets if expand_facets else None,
                callback=request.params.get('callback'))
            return streamer.response()
    solr_browser = Browser(**params)
    try:
        solr_browser.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}
    entries = []
    for dataset, entry in solr_browser.get_entries():
        # Decorate each entry (and its dataset) with API hyperlinks.
        entry = entry_apply_links(dataset.name, entry)
        entry['dataset'] = dataset_apply_links(dataset.as_dict())
        entries.append(entry)
    if format == 'csv':
        return write_csv(entries, response, filename='entries.csv')
    if expand_facets and len(datasets) == 1:
        facets = solr_browser.get_expanded_facets(datasets[0])
    else:
        facets = solr_browser.get_facets()
    return to_jsonp({
        'stats': solr_browser.get_stats(),
        'facets': facets,
        'results': entries
    })