def test_facets_page_pagesize(self):
    """Facet paging must map onto Solr's facet.offset/facet.limit."""
    browser = Browser(facet_field=["one"], facet_page=2, facet_pagesize=50)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    # page 2 at pagesize 50 -> skip the first 50 facet values
    assert solr_kwargs["facet.offset"] == 50
    assert solr_kwargs["facet.limit"] == 50
def index(self, dataset, format='html'):
    """Render the HTML entry listing for *dataset*.

    json/csv requests are redirected to the search API instead of
    being rendered here.
    """
    # Get the dataset into the context variable 'c'
    self._get_dataset(dataset)

    # If the format is either json or csv we direct the user to the search
    # API instead
    if format in ['json', 'csv']:
        return redirect(h.url_for(controller='api/version2',
                                  action='search', format=format,
                                  dataset=dataset, **request.params))

    # Get the default view
    handle_request(request, c, c.dataset)

    # Parse the parameters using the SearchParamParser (used by the API)
    parser = EntryIndexParamParser(request.params)
    params, errors = parser.parse()

    # We have to remove page from the parameters because that's also
    # used in the Solr browser (which fetches the queries)
    params.pop('page')

    # We limit ourselves to only our dataset
    params['filter']['dataset'] = [c.dataset.name]
    # Only dimensions flagged as facetable are faceted on
    facet_dimensions = {field.name: field
                        for field in c.dataset.dimensions
                        if field.facet}
    params['facet_field'] = facet_dimensions.keys()

    # Create a Solr browser and execute it
    b = Browser(**params)
    try:
        b.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}

    # Get the entries, each item is a tuple of the dataset and entry
    solr_entries = b.get_entries()
    entries = [entry for (dataset, entry) in solr_entries]

    # Get expanded facets for this dataset,
    c.facets = b.get_expanded_facets(c.dataset)

    # Create a pager for the entries
    c.entries = templating.Page(entries, **request.params)

    # Set the search word and default to empty string
    c.search = params.get('q', '')

    # Set filters (but remove the dataset as we don't need it)
    c.filters = params['filter']
    del c.filters['dataset']

    # We also make the facet dimensions and dimension names available
    c.facet_dimensions = facet_dimensions
    c.dimensions = [dimension.name for dimension in c.dataset.dimensions]

    # Render the entries page
    return templating.render('entry/index.html')
def test_page_pagesize(self):
    """Result paging must map onto Solr's start/rows parameters."""
    browser = Browser(page=2, pagesize=50)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    # page 2 at pagesize 50 -> start at row 50
    assert solr_kwargs['start'] == 50
    assert solr_kwargs['rows'] == 50
def search(self):
    """Run a Solr search over the requested datasets and return a dict
    with stats, facets and link-annotated result entries."""
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return {'errors': errors}

    expand_facets = params.pop('expand_facet_dimensions')

    datasets = params.pop('dataset', None)
    if datasets is None:
        # No explicit dataset given: search everything this account may
        # read, but never expand facets across multiple datasets.
        datasets = model.Dataset.all_by_account(c.account)
        expand_facets = False

    for dataset in datasets:
        require.dataset.read(dataset)

    browser = Browser(**params)
    stats, facets, entries = browser.execute()
    entries = [entry_apply_links(ds.name, entry) for ds, entry in entries]

    # Facets are only expanded for a single-dataset search.
    if expand_facets and len(datasets) == 1:
        _expand_facets(facets, datasets[0])

    return {
        'stats': stats,
        'facets': facets,
        'results': entries
    }
def test_filter_union(self):
    """A list of values for one field is OR-ed together in fq."""
    browser = Browser(filter={'foo': ['bar', 'baz']})
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert '+foo:"bar" OR +foo:"baz"' in solr_kwargs['fq']
def test_filter_union(self):
    """Multiple values for one field become an OR filter query."""
    union = {"foo": ["bar", "baz"]}
    browser = Browser(filter=union)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert '+foo:"bar" OR +foo:"baz"' in solr_kwargs["fq"]
def search(self):
    """Search entries across one or more datasets via Solr.

    Returns an errors dict on parse failure, missing datasets or a Solr
    error.  Fix: use the modern ``except ... as e`` form instead of the
    legacy ``except E, e`` comma syntax (consistent with the other
    controllers in this file); behavior is unchanged.
    """
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return {'errors': errors}

    expand_facets = params.pop('expand_facet_dimensions')

    datasets = params.pop('dataset', None)
    if datasets is None or not len(datasets):
        # Fall back to everything the account may read, optionally
        # narrowed by category; facet expansion is disabled here.
        q = model.Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        expand_facets = False

    if not len(datasets):
        return {'errors': [_("No dataset available.")]}

    # Restrict the Solr query to the readable datasets.
    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)

    # HTTP caching: last-modified is the newest dataset update.
    response.last_modified = max([d.updated_at for d in datasets])
    etag_cache_keygen(parser.key(), response.last_modified)

    b = Browser(**params)
    try:
        stats, facets, entries = b.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}
def test_page_pagesize(self):
    """Paging parameters translate to Solr start/rows."""
    browser = Browser(page=2, pagesize=50)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert solr_kwargs["start"] == 50
    assert solr_kwargs["rows"] == 50
def test_facets_page_pagesize(self):
    """Facet paging translates to Solr facet.offset/facet.limit."""
    browser = Browser(facet_field=['one'], facet_page=2, facet_pagesize=50)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert solr_kwargs['facet.offset'] == 50
    assert solr_kwargs['facet.limit'] == 50
def test_page_pagesize(self):
    """Paging parameters translate to Solr start/rows."""
    browser = Browser(page=2, pagesize=50)
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    h.assert_equal(solr_kwargs['start'], 50)
    h.assert_equal(solr_kwargs['rows'], 50)
def test_facets_page_pagesize(self):
    """Facet paging translates to Solr facet.offset/facet.limit."""
    browser = Browser(facet_field=['one'], facet_page=2, facet_pagesize=50)
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    h.assert_equal(solr_kwargs['facet.offset'], 50)
    h.assert_equal(solr_kwargs['facet.limit'], 50)
def test_filter_union(self):
    """A list of values for one field is OR-ed together in fq."""
    browser = Browser(filter={'foo': ['bar', 'baz']})
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    h.assert_true('+foo:"bar" OR +foo:"baz"' in solr_kwargs['fq'])
def test_fractional_page_pagesize(self):
    """A fractional page number still yields a usable start offset."""
    browser = Browser(page=2.5, pagesize=50)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    # 1.5 pages past the first page of 50 -> offset 75
    assert solr_kwargs['start'] == 75
    assert solr_kwargs['rows'] == 50
def test_entries_order(self):
    """Entries come back in Solr response order, not dataset order."""
    self.conn.raw_query.return_value = make_response([1, 2, 3])
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    browser = Browser()
    _, _, entries = browser.execute()
    # each item is a (dataset, entry) pair; compare the entries only
    h.assert_equal([entry for (dataset, entry) in entries],
                   make_entries([1, 2, 3]))
def test_filter(self):
    """Filter values are quoted; embedded quotes are escaped."""
    criteria = {'foo': 'bar', 'baz': 'with "quotes"'}
    browser = Browser(filter=criteria)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert '+foo:"bar"' in solr_kwargs['fq']
    assert '+baz:"with \\"quotes\\""' in solr_kwargs['fq']
def test_filter(self):
    """Filter values are quoted and embedded quotes escaped."""
    criteria = {"foo": "bar", "baz": 'with "quotes"'}
    browser = Browser(filter=criteria)
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert '+foo:"bar"' in solr_kwargs["fq"]
    assert '+baz:"with \\"quotes\\""' in solr_kwargs["fq"]
def test_filter(self):
    """Filter values are quoted; embedded quotes get escaped."""
    criteria = {'foo': 'bar', 'baz': 'with "quotes"'}
    browser = Browser(filter=criteria)
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    h.assert_true('+foo:"bar"' in solr_kwargs['fq'])
    h.assert_true('+baz:"with \\"quotes\\""' in solr_kwargs['fq'])
def test_entries_order(self):
    """get_entries yields entries in the order of the Solr response."""
    self.conn.raw_query.return_value = make_response([1, 2, 3])
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    browser = Browser()
    browser.execute()
    pairs = browser.get_entries()
    # each item is a (dataset, entry) pair; keep only the entries
    assert [pair[1] for pair in pairs] == make_entries([1, 2, 3])
def test_facets(self):
    """Requesting facet fields enables faceting with expected defaults."""
    browser = Browser(facet_field=['foo', 'bar'])
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert solr_kwargs['facet'] == 'true'
    assert solr_kwargs['facet.mincount'] == 1
    assert solr_kwargs['facet.sort'] == 'count'
    assert solr_kwargs['facet.field'] == ['foo', 'bar']
def test_entries_stats(self):
    """execute() reports result counts from the Solr response."""
    self.conn.raw_query.return_value = make_response([1, 2, 3])
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    browser = Browser()
    stats, _, _ = browser.execute()
    h.assert_equal(stats['results_count'], 3)
    h.assert_equal(stats['results_count_query'], 1234)
def test_facets(self):
    """Requesting facet fields enables faceting with expected defaults."""
    browser = Browser(facet_field=['foo', 'bar'])
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    h.assert_equal(solr_kwargs['facet'], 'true')
    h.assert_equal(solr_kwargs['facet.mincount'], 1)
    h.assert_equal(solr_kwargs['facet.sort'], 'count')
    h.assert_equal(solr_kwargs['facet.field'], ['foo', 'bar'])
def test_fractional_page_pagesize(self):
    """A fractional page number must yield an *integer* start offset."""
    browser = Browser(page=2.5, pagesize=50)
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    # assert_is (identity) rather than assert_equal so a float 75.0
    # would fail: 'start' has to be a genuine integer.
    h.assert_is(solr_kwargs['start'], 75)
    h.assert_equal(solr_kwargs['rows'], 50)
def test_entries_order(self):
    """get_entries yields entries in the order of the Solr response."""
    self.conn.raw_query.return_value = make_response([1, 2, 3])
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    browser = Browser()
    browser.execute()
    pairs = browser.get_entries()
    # each item is a (dataset, entry) pair; compare the entries only
    h.assert_equal([entry for (dataset, entry) in pairs],
                   make_entries([1, 2, 3]))
def test_facets(self):
    """Facet fields switch faceting on with the expected defaults."""
    browser = Browser(facet_field=["foo", "bar"])
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    expected = {
        "facet": "true",
        "facet.mincount": 1,
        "facet.sort": "count",
        "facet.field": ["foo", "bar"],
    }
    for key, value in expected.items():
        assert solr_kwargs[key] == value
def test_entries_stats(self):
    """get_stats reports result counts from the Solr response."""
    self.conn.raw_query.return_value = make_response([1, 2, 3])
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    browser = Browser()
    browser.execute()
    stats = browser.get_stats()
    h.assert_equal(stats['results_count'], 3)
    h.assert_equal(stats['results_count_query'], 1234)
def test_entries_stats(self):
    """get_stats reports result counts from the Solr response."""
    self.conn.raw_query.return_value = make_response([1, 2, 3])
    self.dataset.entries.return_value = make_entries([3, 1, 2])
    browser = Browser()
    browser.execute()
    stats = browser.get_stats()
    assert stats["results_count"] == 3
    assert stats["results_count_query"] == 1234
def test_simple_query(self):
    """A default Browser issues the catch-all Solr query."""
    browser = Browser()
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    expected = {
        "q": "*:*",
        "fq": [],
        "wt": "json",
        "fl": "id, dataset",
        "sort": "score desc, amount desc",
        "start": 0,
        "rows": 100,
    }
    for key, value in expected.items():
        assert solr_kwargs[key] == value
def test_simple_query(self):
    """A default Browser issues the catch-all Solr query."""
    browser = Browser()
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    expected = {
        'q': '*:*',
        'fq': [],
        'wt': 'json',
        'fl': 'id, dataset',
        'sort': 'score desc, amount desc',
        'start': 0,
        'rows': 100,
    }
    for key, value in expected.items():
        h.assert_equal(solr_kwargs[key], value)
def test_simple_query(self):
    """With no arguments the Browser queries everything, JSON format,
    default sort and paging."""
    browser = Browser()
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert solr_kwargs['q'] == '*:*'
    assert solr_kwargs['fq'] == []
    assert solr_kwargs['wt'] == 'json'
    assert solr_kwargs['fl'] == 'id, dataset'
    assert solr_kwargs['sort'] == 'score desc, amount desc'
    assert solr_kwargs['start'] == 0
    assert solr_kwargs['rows'] == 100
def test_defaults(self):
    """A Browser built with no arguments gets the documented defaults."""
    defaults = Browser().params
    assert defaults['q'] == ''
    assert defaults['page'] == 1
    assert defaults['pagesize'] == 100
    assert defaults['filter'] == {}
    assert defaults['facet_field'] == []
def test_defaults(self):
    """A Browser built with no arguments gets the documented defaults."""
    defaults = Browser().params
    h.assert_equal(defaults['q'], '')
    h.assert_equal(defaults['page'], 1)
    h.assert_equal(defaults['pagesize'], 100)
    h.assert_equal(defaults['filter'], {})
    h.assert_equal(defaults['facet_field'], [])
def index(self, dataset, format='html'):
    """Render the entry listing for *dataset*; json/csv requests are
    redirected to the search API.

    Fix: replace the legacy ``except SolrException, e`` comma syntax
    with ``except ... as e`` (consistent with the other controllers)
    and drop the backslash line-continuations in the comprehension.
    Behavior is unchanged.
    """
    # Get the dataset into the context variable 'c'
    self._get_dataset(dataset)

    # If the format is either json or csv we direct the user to the search
    # API instead
    if format in ['json', 'csv']:
        return redirect(h.url_for(controller='api/version2',
                                  action='search', format=format,
                                  dataset=dataset, **request.params))

    # Get the default view
    handle_request(request, c, c.dataset)

    # Parse the parameters using the SearchParamParser (used by the API)
    parser = EntryIndexParamParser(request.params)
    params, errors = parser.parse()

    # We have to remove page from the parameters because that's also
    # used in the Solr browser (which fetches the queries)
    params.pop('page')

    # We limit ourselves to only our dataset
    params['filter']['dataset'] = [c.dataset.name]
    facet_dimensions = {field.name: field
                        for field in c.dataset.dimensions
                        if field.facet}
    params['facet_field'] = facet_dimensions.keys()

    # Create a Solr browser and execute it
    b = Browser(**params)
    try:
        b.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}
def get_browser(self, page):
    """Build (and memoize) a Browser for the given page of this pager."""
    query = dict(self.params)
    query.update(pagesize=self.pagesize, page=page)
    self.browser = Browser(**query)
    return self.browser
def test_order(self):
    """(field, reverse) tuples map to Solr asc/desc sort clauses."""
    browser = Browser(order=[('amount', False), ('something.id', True)])
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert solr_kwargs['sort'] == 'amount asc, something.id desc'
def test_order(self):
    """(field, reverse) tuples map to Solr asc/desc sort clauses."""
    browser = Browser(order=[('amount', False), ('something.id', True)])
    browser.execute()
    ignored, solr_kwargs = self.conn.raw_query.call_args
    h.assert_equal(solr_kwargs['sort'], 'amount asc, something.id desc')
def index(self, dataset, format='html'):
    """Render the HTML entry listing for *dataset*.

    json/csv requests are redirected to the search API instead of
    being rendered here.
    """
    # Get the dataset into the context variable 'c'
    self._get_dataset(dataset)

    # If the format is either json or csv we direct the user to the search
    # API instead
    if format in ['json', 'csv']:
        return redirect(
            h.url_for(controller='api/version2', action='search',
                      format=format, dataset=dataset,
                      **request.params))

    # Get the default view
    handle_request(request, c, c.dataset)

    # Parse the parameters using the SearchParamParser (used by the API)
    parser = EntryIndexParamParser(request.params)
    params, errors = parser.parse()

    # We have to remove page from the parameters because that's also
    # used in the Solr browser (which fetches the queries)
    params.pop('page')

    # We limit ourselves to only our dataset
    params['filter']['dataset'] = [c.dataset.name]
    # Only dimensions flagged as facetable are faceted on
    facet_dimensions = {
        field.name: field
        for field in c.dataset.dimensions
        if field.facet
    }
    params['facet_field'] = facet_dimensions.keys()

    # Create a Solr browser and execute it
    b = Browser(**params)
    try:
        b.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}

    # Get the entries, each item is a tuple of (dataset, entry)
    solr_entries = b.get_entries()
    # We are only interested in the entry in the tuple since we know
    # the dataset
    entries = [entry[1] for entry in solr_entries]

    # Get expanded facets for this dataset,
    c.facets = b.get_expanded_facets(c.dataset)

    # Create a pager for the entries
    c.entries = templating.Page(entries, **request.params)

    # Set the search word and default to empty string
    c.search = params.get('q', '')

    # Set filters (but remove the dataset as we don't need it)
    c.filters = params['filter']
    del c.filters['dataset']

    # We also make the facet dimensions and dimension names available
    c.facet_dimensions = facet_dimensions
    c.dimensions = [dimension.name for dimension in c.dataset.dimensions]

    # Render the entries page
    return templating.render('entry/index.html')
def search(self):
    """Stream search results for one or more datasets as CSV or JSON.

    Fixes: the trailing ``b = Browser(**params)`` / ``try: b.execute()``
    block was unreachable — both the csv and the json branch end in
    ``return streamer.response()`` — and it used the legacy
    ``except SolrException, e`` syntax; the dead code is removed.
    """
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return to_jsonp({'errors': errors})

    expand_facets = params.pop('expand_facet_dimensions')

    format = params.pop('format')
    if format == 'csv':
        # CSV output carries neither stats nor facets.
        params['stats'] = False
        params['facet_field'] = None

    datasets = params.pop('dataset', None)
    if datasets is None or not datasets:
        # Fall back to everything the account may read, optionally
        # narrowed by category; facet expansion is disabled here.
        q = model.Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        expand_facets = False

    if not datasets:
        return {'errors': ["No dataset available."]}

    # Restrict the query to the readable datasets.
    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)

    # HTTP caching: last-modified is the newest dataset update.
    response.last_modified = max([d.updated_at for d in datasets])
    etag_cache_keygen(parser.key(), response.last_modified)

    self._response_params(params)

    if params['pagesize'] > parser.defaults['pagesize']:
        # Disable proxy buffering for oversized responses, see
        # http://wiki.nginx.org/X-accel#X-Accel-Buffering
        response.headers['X-Accel-Buffering'] = 'no'

    if format == 'csv':
        csv_headers(response, 'entries.csv')
        streamer = CSVStreamingResponse(
            datasets,
            params,
            pagesize=parser.defaults['pagesize']
        )
        return streamer.response()
    else:
        json_headers(filename='entries.json')
        streamer = JSONStreamingResponse(
            datasets,
            params,
            pagesize=parser.defaults['pagesize'],
            expand_facets=_expand_facets if expand_facets else None,
            callback=request.params.get('callback')
        )
        return streamer.response()
def search(self):
    """Stream search results for one or more datasets as CSV or JSON.

    Fixes: the trailing ``b = Browser(**params)`` / ``try: b.execute()``
    block was unreachable — both the csv and the json branch end in
    ``return streamer.response()`` — and it used the legacy
    ``except SolrException, e`` syntax; the dead code is removed.
    """
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return to_jsonp({'errors': errors})

    expand_facets = params.pop('expand_facet_dimensions')

    format = params.pop('format')
    if format == 'csv':
        # CSV output carries neither stats nor facets.
        params['stats'] = False
        params['facet_field'] = None

    datasets = params.pop('dataset', None)
    if datasets is None or not datasets:
        # Fall back to everything the account may read, optionally
        # narrowed by category; facet expansion is disabled here.
        q = model.Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        expand_facets = False

    if not datasets:
        return {'errors': ["No dataset available."]}

    # Restrict the query to the readable datasets.
    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)

    # HTTP caching: last-modified is the newest dataset update.
    response.last_modified = max([d.updated_at for d in datasets])
    etag_cache_keygen(parser.key(), response.last_modified)

    self._response_params(params)

    if params['pagesize'] > parser.defaults['pagesize']:
        # Disable proxy buffering for oversized responses, see
        # http://wiki.nginx.org/X-accel#X-Accel-Buffering
        response.headers['X-Accel-Buffering'] = 'no'

    if format == 'csv':
        csv_headers(response, 'entries.csv')
        streamer = CSVStreamingResponse(
            datasets, params,
            pagesize=parser.defaults['pagesize'])
        return streamer.response()
    else:
        json_headers(filename='entries.json')
        streamer = JSONStreamingResponse(
            datasets, params,
            pagesize=parser.defaults['pagesize'],
            expand_facets=_expand_facets if expand_facets else None,
            callback=request.params.get('callback'))
        return streamer.response()
def test_order(self):
    """(field, reverse) tuples map to Solr asc/desc sort clauses."""
    browser = Browser(order=[("amount", False), ("something.id", True)])
    browser.execute()
    _, solr_kwargs = self.conn.raw_query.call_args
    assert solr_kwargs["sort"] == "amount asc, something.id desc"