def search(self):
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return {'errors': errors}

    expand_facets = params.pop('expand_facet_dimensions')

    datasets = params.pop('dataset', None)
    if datasets is None:
        # No explicit datasets requested: search everything visible to
        # the current account, but don't expand facets across datasets.
        datasets = model.Dataset.all_by_account(c.account)
        expand_facets = False

    for dataset in datasets:
        require.dataset.read(dataset)

    b = Browser(**params)
    stats, facets, entries = b.execute()
    entries = [entry_apply_links(d.name, e) for d, e in entries]

    # Facet expansion is only meaningful within a single dataset.
    if expand_facets and len(datasets) == 1:
        _expand_facets(facets, datasets[0])

    return {
        'stats': stats,
        'facets': facets,
        'results': entries
    }
def search(self):
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return {'errors': errors}

    expand_facets = params.pop('expand_facet_dimensions')

    datasets = params.pop('dataset', None)
    if datasets is None or not len(datasets):
        # No explicit datasets given: search everything visible to the
        # current account, optionally narrowed by category.
        q = model.Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        expand_facets = False

    if not len(datasets):
        return {'errors': [_("No dataset available.")]}

    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)

    response.last_modified = max([d.updated_at for d in datasets])
    etag_cache_keygen(parser.key(), response.last_modified)

    b = Browser(**params)
    try:
        stats, facets, entries = b.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}
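# A minimal usage sketch of SearchParamParser, based only on behaviour the
# tests below exercise: multi-value params are pipe-separated, filters are
# colon-delimited key:value pairs, and parse() returns a (params, errors)
# pair. The query dict here is illustrative, not taken from the source.
example_query = {
    'filter': 'year:2010',
    'facet_field': 'to|from',
}
params, errors = SearchParamParser(example_query).parse()
assert not errors
assert params['facet_field'] == ['to', 'from']
assert params['filter'] == {'year': '2010'}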
def test_expand_facet_dimensions(self):
    # Expand facet dimensions should default to False
    out, err = SearchParamParser({}).parse()
    assert not out['expand_facet_dimensions']

    # If the expand_facet_dimensions param is provided, we should return True
    out, err = SearchParamParser({'expand_facet_dimensions': ''}).parse()
    assert out['expand_facet_dimensions']
def test_dataset(self, model_mock):
    # model_mock stands in for the dataset model (patched via a decorator
    # not shown here); 'baz' simulates a lookup miss.
    def _mock_dataset(name):
        if name == 'baz':
            return None
        ds = Mock()
        ds.name = name
        return ds
    model_mock.by_name.side_effect = _mock_dataset

    out, err = SearchParamParser({'dataset': 'foo|bar'}).parse()
    assert [x.name for x in out['dataset']] == ['foo', 'bar']

    out, err = SearchParamParser({'dataset': 'baz'}).parse()
    assert 'no dataset with name "baz"' in err[0]
def test_facet_page_fractional(self):
    out, err = SearchParamParser({'facet_page': '1.7'}).parse()
    assert out['facet_page'] == 1.7

    out, err = SearchParamParser({'facet_page': '0.6'}).parse()
    assert out['facet_page'] == 1
def test_facet_page(self):
    out, err = SearchParamParser({'facet_page': '14'}).parse()
    assert out['facet_page'] == 14
def test_category(self):
    out, err = SearchParamParser({'category': 'banana'}).parse()
    assert 'category' not in out

    out, err = SearchParamParser({'category': 'spending'}).parse()
    assert out['category'] == 'spending'
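# test_category implies the parser validates against a whitelist of known
# categories, silently dropping unknown values ('banana') rather than
# raising an error. A sketch of that rule: CATEGORIES is an assumed name,
# and only 'spending' is confirmed by the test above.
CATEGORIES = ('budget', 'spending')  # 'budget' is an assumption

def parse_category(raw):
    # Return the category if recognised, else None so the caller omits
    # 'category' from the parsed output entirely.
    return raw if raw in CATEGORIES else None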
def test_facet_field(self):
    out, err = SearchParamParser({'facet_field': 'foo|bar|baz'}).parse()
    h.assert_equal(out['facet_field'], ['foo', 'bar', 'baz'])
def test_filter(self):
    out, err = SearchParamParser({'filter': 'foo:one|bar:two'}).parse()
    assert out['filter'] == {'foo': 'one', 'bar': 'two'}

    out, err = SearchParamParser({'filter': 'foo:one|bar'}).parse()
    assert 'Wrong format for "filter"' in err[0]
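# A sketch of filter parsing consistent with test_filter above: segments
# are pipe-separated key:value pairs, and a segment without a colon yields
# a 'Wrong format for "filter"' error. This is a hypothetical helper, not
# the parser's actual internals.
def parse_filter(raw):
    filters, errors = {}, []
    for segment in raw.split('|'):
        if ':' not in segment:
            errors.append('Wrong format for "filter": %s' % segment)
            continue
        key, value = segment.split(':', 1)
        filters[key] = value
    return filters, errors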
def test_expand_facet_dimensions(self):
    out, err = SearchParamParser({}).parse()
    h.assert_equal(out['expand_facet_dimensions'], False)

    out, err = SearchParamParser({'expand_facet_dimensions': ''}).parse()
    h.assert_equal(out['expand_facet_dimensions'], True)
def test_facet_pagesize(self):
    out, err = SearchParamParser({'facet_pagesize': '73'}).parse()
    h.assert_equal(out['facet_pagesize'], 73)

    out, err = SearchParamParser({'facet_pagesize': '140'}).parse()
    h.assert_equal(out['facet_pagesize'], 100)
def test_facet_page_fractional(self):
    out, err = SearchParamParser({'facet_page': '1.7'}).parse()
    h.assert_equal(out['facet_page'], 1.7)

    out, err = SearchParamParser({'facet_page': '0.6'}).parse()
    h.assert_equal(out['facet_page'], 1)
def test_facet_page(self):
    out, err = SearchParamParser({'facet_page': '14'}).parse()
    h.assert_equal(out['facet_page'], 14)
def test_category(self):
    out, err = SearchParamParser({'category': 'banana'}).parse()
    h.assert_equal('category' in out, False)

    out, err = SearchParamParser({'category': 'spending'}).parse()
    h.assert_equal(out['category'], 'spending')
def test_facet_pagesize(self):
    out, err = SearchParamParser({'facet_pagesize': '73'}).parse()
    assert out['facet_pagesize'] == 73

    out, err = SearchParamParser({'facet_pagesize': '140'}).parse()
    assert out['facet_pagesize'] == 100
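# The facet_page and facet_pagesize tests pin down bounds-checking rather
# than exact parsing internals: pages parse as floats with a lower bound of
# 1, page sizes as ints capped at 100. A sketch of those rules inferred
# from the assertions alone; function names are hypothetical.
def parse_facet_page(raw):
    return max(1, float(raw))  # '0.6' -> 1, '1.7' -> 1.7

def parse_facet_pagesize(raw):
    return min(100, int(raw))  # '140' -> 100, '73' -> 73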
def search(self):
    parser = SearchParamParser(request.params)
    params, errors = parser.parse()
    if errors:
        response.status = 400
        return to_jsonp({'errors': errors})

    expand_facets = params.pop('expand_facet_dimensions')

    format = params.pop('format')
    if format == 'csv':
        # CSV output is a flat dump of entries: no stats, no facets.
        params['stats'] = False
        params['facet_field'] = None

    datasets = params.pop('dataset', None)
    if datasets is None or not datasets:
        # No explicit datasets given: search everything visible to the
        # current account, optionally narrowed by category.
        q = model.Dataset.all_by_account(c.account)
        if params.get('category'):
            q = q.filter_by(category=params.pop('category'))
        datasets = q.all()
        expand_facets = False

    if not datasets:
        return {'errors': ["No dataset available."]}

    params['filter']['dataset'] = []
    for dataset in datasets:
        require.dataset.read(dataset)
        params['filter']['dataset'].append(dataset.name)

    response.last_modified = max([d.updated_at for d in datasets])
    etag_cache_keygen(parser.key(), response.last_modified)

    self._response_params(params)

    if params['pagesize'] > parser.defaults['pagesize']:
        # Oversized result sets are streamed; disable proxy buffering, see
        # http://wiki.nginx.org/X-accel#X-Accel-Buffering
        response.headers['X-Accel-Buffering'] = 'no'

        if format == 'csv':
            csv_headers(response, 'entries.csv')
            streamer = CSVStreamingResponse(
                datasets,
                params,
                pagesize=parser.defaults['pagesize']
            )
            return streamer.response()
        else:
            json_headers(filename='entries.json')
            streamer = JSONStreamingResponse(
                datasets,
                params,
                pagesize=parser.defaults['pagesize'],
                expand_facets=_expand_facets if expand_facets else None,
                callback=request.params.get('callback')
            )
            return streamer.response()

    b = Browser(**params)
    try:
        b.execute()
    except SolrException as e:
        return {'errors': [unicode(e)]}
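# A generic sketch of the paging pattern the streaming responses above rely
# on: fetch one default-sized page at a time and yield entries as they
# arrive, so oversized requests never hold the full result set in memory.
# The real CSVStreamingResponse/JSONStreamingResponse internals are not
# shown in the source; this shape is an assumption.
def stream_entries(params, pagesize):
    page = 1
    while True:
        params.update(page=page, pagesize=pagesize)
        stats, facets, entries = Browser(**params).execute()
        if not entries:
            break
        for dataset, entry in entries:
            yield entry
        page += 1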
def test_facet_field(self):
    out, err = SearchParamParser({'facet_field': 'foo|bar|baz'}).parse()
    assert out['facet_field'] == ['foo', 'bar', 'baz']
def test_filter(self):
    out, err = SearchParamParser({'filter': 'foo:one|bar:two'}).parse()
    h.assert_equal(out['filter'], {'foo': 'one', 'bar': 'two'})

    out, err = SearchParamParser({'filter': 'foo:one|bar'}).parse()
    h.assert_true('Wrong format for "filter"' in err[0])