def test_returns_503_when_es_unavailable(self, monkeypatch, db_request, metrics):
    """An elasticsearch ConnectionError surfaces as HTTP 503 and bumps the
    error metric without recording a results histogram."""
    params = MultiDict({"page": 15})
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    # Paginating the query is the point where the connection failure fires.
    def raiser(*args, **kwargs):
        raise elasticsearch.ConnectionError()

    monkeypatch.setattr(views, "ElasticsearchPage", raiser)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    with pytest.raises(HTTPServiceUnavailable):
        search(db_request)

    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert metrics.increment.calls == [
        pretend.call("warehouse.views.search.error")
    ]
    assert metrics.histogram.calls == []
def test_with_classifiers(self, monkeypatch, db_request, metrics, page):
    """Classifier ("c") params are forwarded to get_es_query and the
    available filters are grouped by top-level classifier category.
    """
    params = MultiDict([("q", "foo bar"), ("c", "foo :: bar"), ("c", "fiz :: buz")])
    if page is not None:
        params["page"] = page
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub()
    get_es_query = pretend.call_recorder(lambda *a, **kw: es_query)
    monkeypatch.setattr(views, "get_es_query", get_es_query)

    classifier1 = ClassifierFactory.create(classifier="foo :: bar")
    classifier2 = ClassifierFactory.create(classifier="foo :: baz")
    classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

    # Only classifier1/classifier2 are attached to a release, so only the
    # "foo" category should show up as an available filter.
    project = ProjectFactory.create()
    release1 = ReleaseFactory.create(project=project)
    release1.created = datetime.date(2011, 1, 1)
    release1._classifiers.append(classifier1)
    release1._classifiers.append(classifier2)

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    search_view = search(db_request)

    assert search_view == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": "",
        "applied_filters": params.getall("c"),
        "available_filters": [
            {
                "foo": {
                    classifier1.classifier.split(" :: ")[1]: {},
                    classifier2.classifier.split(" :: ")[1]: {},
                }
            }
        ],
    }
    # BUG FIX: available_filters is a list of dicts, so the original
    # `("fiz", [classifier3.classifier]) not in ...` tuple-membership test
    # was vacuously true. Assert instead that no filter group exposes the
    # unattached "fiz" category.
    assert not any(
        "fiz" in available_filter
        for available_filter in search_view["available_filters"]
    )
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1)
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert get_es_query.calls == [
        pretend.call(db_request.es, params.get("q"), "", params.getall("c"))
    ]
    assert metrics.histogram.calls == [
        pretend.call("warehouse.views.search.results", 1000)
    ]
def test_without_a_query(self, monkeypatch, db_request, page):
    """With no "q" param the raw ES query is paginated and the view
    returns empty term/order/filters."""
    params = MultiDict()
    if page is not None:
        params["page"] = page
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
def test_without_a_query(self, monkeypatch, db_request, page):
    """Variant of the no-query test whose page stub also carries an
    item_count, matching views that report result counts."""
    params = MultiDict()
    if page is not None:
        params["page"] = page
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
def test_without_a_query(self, monkeypatch, page):
    """Minimal no-query variant: a fully stubbed request (no db fixture),
    and the view returns only page/term/order."""
    params = {}
    if page is not None:
        params["page"] = page

    query = pretend.stub()
    request = pretend.stub(
        es=pretend.stub(query=lambda: query),
        params=params,
    )

    page_obj = pretend.stub()
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(request) == {
        "page": page_obj,
        "term": params.get("q"),
        "order": params.get("o"),
    }
    assert page_cls.calls == [
        pretend.call(query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(request)]
def test_with_a_query(self, monkeypatch, db_request, page):
    """A "q" param builds a multi_match query with boosted fields and a
    name_suggestion suggester; the suggest result gets paginated."""
    params = MultiDict({"q": "foo bar"})
    if page is not None:
        params["page"] = page
    db_request.params = params

    sort = pretend.stub()
    suggest = pretend.stub(
        sort=pretend.call_recorder(lambda *a, **kw: sort),
    )
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(suggest, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(
            "multi_match",
            query="foo bar",
            fields=[
                "author",
                "author_email",
                "description^5",
                "download_url",
                "home_page",
                "keywords^5",
                "license",
                "maintainer",
                "maintainer_email",
                "normalized_name^10",
                "platform",
                "summary^5",
            ],
        ),
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            name="name_suggestion",
            term={"field": "name"},
            text="foo bar",
        ),
    ]
def test_with_an_ordering(self, monkeypatch, db_request, page):
    """An "o" param applies a sort to the suggest result, and the sorted
    query is what gets paginated."""
    params = MultiDict({"q": "foo bar", "o": "-created"})
    if page is not None:
        params["page"] = page
    db_request.params = params

    sort = pretend.stub()
    suggest = pretend.stub(
        sort=pretend.call_recorder(lambda *a, **kw: sort),
    )
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    # With an ordering, the *sorted* query is paginated (not the suggest).
    assert page_cls.calls == [
        pretend.call(sort, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(
            "multi_match",
            query="foo bar",
            fields=[
                "name^2",
                "version",
                "author",
                "author_email",
                "maintainer",
                "maintainer_email",
                "home_page",
                "license",
                "summary",
                "description",
                "keywords",
                "platform",
                "download_url",
            ],
        ),
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            name="name_suggestion",
            term={"field": "name"},
            text="foo bar",
        ),
    ]
    assert suggest.sort.calls == [pretend.call("-created")]
def test_with_an_ordering(self, monkeypatch, db_request, page, order, expected):
    """Parametrized ordering test: each `order` value maps to the
    `expected` sequence of sort() calls; no order paginates the suggest."""
    params = MultiDict({"q": "foo bar"})
    if page is not None:
        params["page"] = page
    if order is not None:
        params["o"] = order
    db_request.params = params

    sort = pretend.stub()
    suggest = pretend.stub(
        sort=pretend.call_recorder(lambda *a, **kw: sort),
    )
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(
            sort if order is not None else suggest,
            url_maker=url_maker,
            page=page or 1,
        ),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call("dis_max", queries=self._gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            "name_suggestion",
            params["q"],
            term={"field": "name"},
        ),
    ]
    assert suggest.sort.calls == [pretend.call(i) for i in expected]
def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request):
    """A non-integer "page" param raises HTTP 400 before any pagination
    is attempted."""
    params = MultiDict({"page": "abc"})
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    with pytest.raises(HTTPBadRequest):
        search(db_request)

    # The view must reject the bogus page number before paginating.
    assert page_cls.calls == []
def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request):
    """Variant of the bad-page-number test whose page stub has no
    item_count; behavior is identical (400 before pagination)."""
    params = MultiDict({"page": "abc"})
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    with pytest.raises(HTTPBadRequest):
        search(db_request)

    assert page_cls.calls == []
def test_with_a_query(self, monkeypatch, page):
    """Oldest query-test variant: fully stubbed request, unboosted field
    list, view returns only page and term."""
    params = {"q": "foo bar"}
    if page is not None:
        params["page"] = page

    suggest = pretend.stub()
    query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    request = pretend.stub(
        es=pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: query),
        ),
        params=params,
    )

    page_obj = pretend.stub()
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(request) == {"page": page_obj, "term": params.get("q")}
    assert page_cls.calls == [
        pretend.call(suggest, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(request)]
    assert request.es.query.calls == [
        pretend.call(
            "multi_match",
            query="foo bar",
            fields=[
                "name",
                "version",
                "author",
                "author_email",
                "maintainer",
                "maintainer_email",
                "home_page",
                "license",
                "summary",
                "description",
                "keywords",
                "platform",
                "download_url",
            ],
        ),
    ]
    assert query.suggest.calls == [
        pretend.call(
            name="name_suggestion",
            term={"field": "name"},
            text="foo bar",
        ),
    ]
def test_returns_503_when_es_unavailable(self, monkeypatch, db_request, metrics):
    """Same 503 behavior as above, checked via a compact assertion layout."""
    params = MultiDict({"page": 15})
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    def raiser(*args, **kwargs):
        raise elasticsearch.ConnectionError()

    monkeypatch.setattr(views, "ElasticsearchPage", raiser)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    with pytest.raises(HTTPServiceUnavailable):
        search(db_request)

    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert metrics.increment.calls == [pretend.call("warehouse.views.search.error")]
    assert metrics.histogram.calls == []
def test_raises_404_with_pagenum_too_high(self, monkeypatch, db_request):
    """Requesting page 15 of a 10-page result set raises HTTP 404 after
    pagination was attempted."""
    params = MultiDict({"page": 15})
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    with pytest.raises(HTTPNotFound):
        search(db_request)

    # `15 or 1` in the original is constant-folded to 15.
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=15),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
def test_with_a_query(self, monkeypatch, page):
    """Old query-test variant: stubbed request, name^2 boost, view returns
    page/term/order."""
    params = {"q": "foo bar"}
    if page is not None:
        params["page"] = page

    suggest = pretend.stub()
    query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    request = pretend.stub(
        es=pretend.stub(
            query=pretend.call_recorder(lambda *a, **kw: query),
        ),
        params=params,
    )

    page_obj = pretend.stub()
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(request) == {
        "page": page_obj,
        "term": params.get("q"),
        "order": params.get("o"),
    }
    assert page_cls.calls == [
        pretend.call(suggest, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(request)]
    assert request.es.query.calls == [
        pretend.call(
            "multi_match",
            query="foo bar",
            fields=[
                "name^2",
                "version",
                "author",
                "author_email",
                "maintainer",
                "maintainer_email",
                "home_page",
                "license",
                "summary",
                "description",
                "keywords",
                "platform",
                "download_url",
            ],
        ),
    ]
    assert query.suggest.calls == [
        pretend.call(
            name="name_suggestion",
            term={"field": "name"},
            text="foo bar",
        ),
    ]
def test_with_a_single_char_query(self, monkeypatch, db_request, page):
    """A one-character query still builds the gathered ES query, adds a
    suggester, and records a datadog results histogram."""
    params = MultiDict({"q": "a"})
    if page is not None:
        params["page"] = page
    db_request.params = params

    sort = pretend.stub()
    suggest = pretend.stub(
        sort=pretend.call_recorder(lambda *a, **kw: sort),
    )
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(suggest, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(views.gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            "name_suggestion",
            params["q"],
            term={"field": "name"},
        ),
    ]
    assert db_request.registry.datadog.histogram.calls == [
        pretend.call("warehouse.views.search.results", 1000)
    ]
def test_with_a_single_char_query(self, monkeypatch, db_request, page):
    """Variant of the single-char test that asserts the dis_max form of
    the gathered queries."""
    params = MultiDict({"q": "a"})
    if page is not None:
        params["page"] = page
    db_request.params = params

    sort = pretend.stub()
    suggest = pretend.stub(
        sort=pretend.call_recorder(lambda *a, **kw: sort),
    )
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(suggest, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call("dis_max", queries=self._gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            "name_suggestion",
            params["q"],
            term={"field": "name"},
        ),
    ]
    assert db_request.registry.datadog.histogram.calls == [
        pretend.call("warehouse.views.search.results", 1000)
    ]
def test_with_exact_phrase_query(self, monkeypatch, db_request, metrics, page):
    """A double-quoted phrase query goes through the same gathered-query
    path and the metrics histogram records the item count."""
    params = MultiDict({"q": '"foo bar"'})
    if page is not None:
        params["page"] = page
    db_request.params = params

    sort = pretend.stub()
    suggest = pretend.stub(
        sort=pretend.call_recorder(lambda *a, **kw: sort)
    )
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: suggest)
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    # item_count mirrors page_count so the histogram value tracks `page`.
    page_obj = pretend.stub(
        page_count=(page or 1) + 10, item_count=(page or 1) + 10
    )
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(suggest, url_maker=url_maker, page=page or 1)
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(views.gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call("name_suggestion", params["q"], term={"field": "name"})
    ]
    assert metrics.histogram.calls == [
        pretend.call("warehouse.views.search.results", (page or 1) + 10)
    ]
def test_without_a_query(self, monkeypatch, page):
    """Earliest no-query variant: the view returns only the page object."""
    params = {}
    if page is not None:
        params["page"] = page

    query = pretend.stub()
    request = pretend.stub(
        es=pretend.stub(query=lambda: query),
        params=params,
    )

    page_obj = pretend.stub()
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(request) == {"page": page_obj}
    assert page_cls.calls == [
        pretend.call(query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(request)]
def test_returns_404_with_pagenum_too_high(self, monkeypatch, db_request, metrics):
    """Variant where an out-of-range page *returns* an HTTPNotFound
    response (rather than raising), and no histogram is recorded."""
    params = MultiDict({"page": 15})
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    resp = search(db_request)

    assert isinstance(resp, HTTPNotFound)
    # `15 or 1` in the original is constant-folded to 15.
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=15)
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert metrics.histogram.calls == []
def test_with_a_query(self, monkeypatch, db_request, metrics, page):
    """Current-style query test: get_es_query builds the query, the page
    is rendered, and the results histogram is recorded."""
    params = MultiDict({"q": "foo bar"})
    if page is not None:
        params["page"] = page
    db_request.params = params

    db_request.es = pretend.stub()
    es_query = pretend.stub()
    get_es_query = pretend.call_recorder(lambda *a, **kw: es_query)
    monkeypatch.setattr(views, "get_es_query", get_es_query)

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": "",
        "applied_filters": [],
        "available_filters": [],
    }
    assert get_es_query.calls == [
        pretend.call(db_request.es, params.get("q"), "", [])
    ]
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1)
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert metrics.histogram.calls == [
        pretend.call("warehouse.views.search.results", 1000)
    ]
def test_with_classifiers(self, monkeypatch, db_request, page):
    """Classifier variant asserting per-classifier terms filters and
    tuple-shaped available_filters grouped by category."""
    params = MultiDict([
        ("q", "foo bar"),
        ("c", "foo :: bar"),
        ("c", "fiz :: buz"),
    ])
    if page is not None:
        params["page"] = page
    db_request.params = params

    # The lambdas close over es_query late, so a single stub can chain
    # suggest/filter/sort back onto itself.
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: es_query),
        filter=pretend.call_recorder(lambda *a, **kw: es_query),
        sort=pretend.call_recorder(lambda *a, **kw: es_query),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    classifier1 = ClassifierFactory.create(classifier="foo :: bar")
    classifier2 = ClassifierFactory.create(classifier="foo :: baz")
    classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

    project = ProjectFactory.create()
    release1 = ReleaseFactory.create(project=project)
    release1.created = datetime.date(2011, 1, 1)
    release1._classifiers.append(classifier1)
    release1._classifiers.append(classifier2)

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    search_view = search(db_request)

    assert search_view == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": params.getall("c"),
        "available_filters": [
            ("foo", [
                classifier1.classifier,
                classifier2.classifier,
            ])
        ],
    }
    assert (
        ("fiz", [classifier3.classifier])
        not in search_view["available_filters"]
    )
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(views.gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            "name_suggestion",
            params["q"],
            term={"field": "name"},
        ),
    ]
    # One terms filter per applied classifier, in request order.
    assert es_query.filter.calls == [
        pretend.call("terms", classifiers=["foo :: bar"]),
        pretend.call("terms", classifiers=["fiz :: buz"]),
    ]
def test_with_classifiers(self, monkeypatch, db_request, metrics, page):
    """Classifier variant asserting prefix queries per classifier,
    dict-shaped available_filters, and the results histogram.
    """
    params = MultiDict([("q", "foo bar"), ("c", "foo :: bar"), ("c", "fiz :: buz")])
    if page is not None:
        params["page"] = page
    db_request.params = params

    # The lambdas close over es_query late, so a single stub can chain
    # suggest/filter/query/sort back onto itself.
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: es_query),
        filter=pretend.call_recorder(lambda *a, **kw: es_query),
        query=pretend.call_recorder(lambda *a, **kw: es_query),
        sort=pretend.call_recorder(lambda *a, **kw: es_query),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    classifier1 = ClassifierFactory.create(classifier="foo :: bar")
    classifier2 = ClassifierFactory.create(classifier="foo :: baz")
    classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

    project = ProjectFactory.create()
    release1 = ReleaseFactory.create(project=project)
    release1.created = datetime.date(2011, 1, 1)
    release1._classifiers.append(classifier1)
    release1._classifiers.append(classifier2)

    page_obj = pretend.stub(page_count=(page or 1) + 10, item_count=1000)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    search_view = search(db_request)

    assert search_view == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": params.getall("c"),
        "available_filters": [
            {
                "foo": {
                    classifier1.classifier.split(" :: ")[1]: {},
                    classifier2.classifier.split(" :: ")[1]: {},
                }
            }
        ],
    }
    # BUG FIX: available_filters is a list of dicts, so the original
    # `("fiz", [classifier3.classifier]) not in ...` tuple-membership test
    # was vacuously true. Assert instead that no filter group exposes the
    # unattached "fiz" category.
    assert not any(
        "fiz" in available_filter
        for available_filter in search_view["available_filters"]
    )
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1)
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(views.gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call("name_suggestion", params["q"], term={"field": "name"})
    ]
    assert es_query.query.calls == [
        pretend.call("prefix", classifiers="foo :: bar"),
        pretend.call("prefix", classifiers="fiz :: buz"),
    ]
    assert metrics.histogram.calls == [
        pretend.call("warehouse.views.search.results", 1000)
    ]
def test_with_classifiers(self, monkeypatch, db_request, page):
    """Classifier variant with a single combined terms filter and a
    dis_max query built from the gathered sub-queries."""
    params = MultiDict([
        ("q", "foo bar"),
        ("c", "foo :: bar"),
        ("c", "fiz :: buz"),
    ])
    if page is not None:
        params["page"] = page
    db_request.params = params

    # The lambdas close over es_query late, so a single stub can chain
    # suggest/filter/sort back onto itself.
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: es_query),
        filter=pretend.call_recorder(lambda *a, **kw: es_query),
        sort=pretend.call_recorder(lambda *a, **kw: es_query),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    classifier1 = ClassifierFactory.create(classifier="foo :: bar")
    classifier2 = ClassifierFactory.create(classifier="foo :: baz")
    classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

    project = ProjectFactory.create()
    release1 = ReleaseFactory.create(project=project)
    release1.created = datetime.date(2011, 1, 1)
    release1._classifiers.append(classifier1)
    release1._classifiers.append(classifier2)

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    search_view = search(db_request)

    assert search_view == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": params.getall("c"),
        "available_filters": [
            ("foo", [
                classifier1.classifier,
                classifier2.classifier,
            ])
        ],
    }
    assert (
        ("fiz", [classifier3.classifier])
        not in search_view["available_filters"]
    )
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call("dis_max", queries=self._gather_es_queries(params["q"]))
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            "name_suggestion",
            params["q"],
            term={"field": "name"},
        ),
    ]
    # Both applied classifiers arrive in a single terms filter.
    assert es_query.filter.calls == [
        pretend.call("terms", classifiers=["foo :: bar", "fiz :: buz"])
    ]
def test_with_classifiers(self, monkeypatch, db_request, page):
    """Classifier variant where *all* created classifiers (no releases
    attached) are listed as available filters, sorted by category."""
    params = MultiDict([
        ("q", "foo bar"),
        ("c", "foo :: bar"),
        ("c", "fiz :: buz"),
    ])
    if page is not None:
        params["page"] = page
    db_request.params = params

    # The lambdas close over es_query late, so a single stub can chain
    # suggest/filter/sort back onto itself.
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: es_query),
        filter=pretend.call_recorder(lambda *a, **kw: es_query),
        sort=pretend.call_recorder(lambda *a, **kw: es_query),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    classifier1 = ClassifierFactory.create(classifier="foo :: bar")
    classifier2 = ClassifierFactory.create(classifier="foo :: baz")
    classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": params.getall("c"),
        "available_filters": [
            ("fiz", [classifier3.classifier]),
            ("foo", [
                classifier1.classifier,
                classifier2.classifier,
            ]),
        ],
    }
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(
            "multi_match",
            query="foo bar",
            fields=[
                "author",
                "author_email",
                "description^5",
                "download_url",
                "home_page",
                "keywords^5",
                "license",
                "maintainer",
                "maintainer_email",
                "normalized_name^10",
                "platform",
                "summary^5",
            ],
        ),
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            name="name_suggestion",
            term={"field": "name"},
            text="foo bar",
        ),
    ]
    assert es_query.filter.calls == [
        pretend.call("terms", classifiers=["foo :: bar", "fiz :: buz"])
    ]
def test_with_classifiers(self, monkeypatch, db_request, page):
    """Oldest classifier variant: name^2 field boosts and a single
    combined terms filter."""
    params = MultiDict([
        ("q", "foo bar"),
        ("c", "foo :: bar"),
        ("c", "fiz :: buz"),
    ])
    if page is not None:
        params["page"] = page
    db_request.params = params

    # The lambdas close over es_query late, so a single stub can chain
    # suggest/filter/sort back onto itself.
    es_query = pretend.stub(
        suggest=pretend.call_recorder(lambda *a, **kw: es_query),
        filter=pretend.call_recorder(lambda *a, **kw: es_query),
        sort=pretend.call_recorder(lambda *a, **kw: es_query),
    )
    db_request.es = pretend.stub(
        query=pretend.call_recorder(lambda *a, **kw: es_query)
    )

    classifier1 = ClassifierFactory.create(classifier="foo :: bar")
    classifier2 = ClassifierFactory.create(classifier="foo :: baz")
    classifier3 = ClassifierFactory.create(classifier="fiz :: buz")

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ""),
        "order": params.get("o", ""),
        "applied_filters": params.getall("c"),
        "available_filters": [
            ("fiz", [classifier3.classifier]),
            ("foo", [
                classifier1.classifier,
                classifier2.classifier,
            ]),
        ],
    }
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
    assert db_request.es.query.calls == [
        pretend.call(
            "multi_match",
            query="foo bar",
            fields=[
                "name^2",
                "version",
                "author",
                "author_email",
                "maintainer",
                "maintainer_email",
                "home_page",
                "license",
                "summary",
                "description",
                "keywords",
                "platform",
                "download_url",
            ],
        ),
    ]
    assert es_query.suggest.calls == [
        pretend.call(
            name="name_suggestion",
            term={"field": "name"},
            text="foo bar",
        ),
    ]
    assert es_query.filter.calls == [
        pretend.call("terms", classifiers=["foo :: bar", "fiz :: buz"])
    ]