def tearDown(self):
    """Remove the test apps from the ES index after each test."""
    self.app1.delete()
    self.app2.delete()
    unindex_webapps([self.app1.id, self.app2.id])
    # Lucene only marks deleted documents inside a segment; force an
    # expunge so the suggestions data structure is actually purged.
    WebappIndexer.get_es().indices.optimize(
        index=WebappIndexer.get_index(), only_expunge_deletes=True)
def tearDown(self):
    # Cleanup to remove these from the index.
    self.app1.delete()
    self.app2.delete()
    unindex_webapps([self.app1.id, self.app2.id])
    # Required to purge the suggestions data structure. In Lucene, a
    # document is not deleted from a segment, just marked as deleted.
    es = WebappIndexer.get_es()
    es.indices.optimize(index=WebappIndexer.get_index(),
                        only_expunge_deletes=True)
def get(self, request, *args, **kwargs):
    """Return app-name completion suggestions as a JSON list.

    Query params:
        q: (partial) search text for the completion suggester.
        limit: maximum number of suggestions to return (default 5).
    """
    # GET values arrive as strings; ES expects an integer `size`, so
    # coerce and fall back to the default on bad input.
    try:
        limit = int(request.GET.get('limit', 5))
    except (TypeError, ValueError):
        limit = 5
    es_query = {
        'apps': {
            'completion': {'field': 'name_suggest', 'size': limit},
            'text': request.GET.get('q', '').strip()
        }
    }
    results = WebappIndexer.get_es().suggest(
        body=es_query, index=WebappIndexer.get_index())
    if 'apps' in results:
        data = results['apps'][0]['options']
    else:
        data = []
    serializer = self.get_serializer(data)
    # This returns a JSON list. Usually this is a bad idea for security
    # reasons, but we don't include any user-specific data, it's fully
    # anonymous, so we're fine.
    return HttpResponse(json.dumps(serializer.data),
                        content_type='application/x-rocketbar+json')
def search_webapps_and_homescreens():
    """Return a Search querying both homescreen and webapp documents,
    excluding fields that should stay server-side."""
    indexes = [settings.ES_INDEXES['homescreen'],
               settings.ES_INDEXES['webapp']]
    query = Search(using=WebappIndexer.get_es(), index=indexes,
                   doc_type=['homescreen', 'webapp'])
    return query.extra(_source={'exclude': WebappIndexer.hidden_fields})
def mget_apps(self, app_ids):
    """
    Takes a list of app_ids. Does an ES mget.

    Returns an app_map for serializer context.
    """
    es = WebappIndexer.get_es()
    response = es.mget(body={'ids': app_ids},
                       index=WebappIndexer.get_index(),
                       doc_type=WebappIndexer.get_mapping_type_name())
    # Key each document by its id so feed elements can look apps up later.
    return dict((doc['_source']['id'], doc['_source'])
                for doc in response['docs'])
def test_single_hit(self):
    """Test the ESPaginator only queries ES one time."""
    es = WebappIndexer.get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        ESPaginator(WebappIndexer.search(), 5).object_list.execute()
        eq_(es.counter, 1)
    finally:
        # Restore the real search even if the assertion fails, so the
        # monkey-patch doesn't leak into other tests.
        es.search = orig_search
def index_webapps(ids, **kw):
    """TODO: use search/indexers.py:index."""
    # Guard against an empty id list; mirrors unindex_webapps and avoids
    # an IndexError from `ids[0]` below.
    if not ids:
        return

    task_log.info('Indexing apps %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))

    index = kw.pop('index', WebappIndexer.get_index())
    # Note: If reindexing is currently occurring, `get_indices` will return
    # more than one index.
    indices = Reindexing.get_indices(index)

    es = WebappIndexer.get_es(urls=settings.ES_URLS)
    qs = Webapp.indexing_transformer(
        Webapp.with_deleted.no_cache().filter(id__in=ids))
    for obj in qs:
        doc = WebappIndexer.extract_document(obj.id, obj)
        for idx in indices:
            WebappIndexer.index(doc, id_=obj.id, es=es, index=idx)
def test_q_num_requests_no_results(self):
    """A query with no hits still makes exactly one ES search call."""
    es = WebappIndexer.get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        res = self.anon.get(self.url, data={'q': 'noresults'})
        eq_(res.status_code, 200)
        eq_(res.json['meta']['total_count'], 0)
        eq_(len(res.json['objects']), 0)
        # Verify only one search call was made.
        eq_(es.counter, 1)
    finally:
        # Restore even on failure so the patch doesn't leak across tests.
        es.search = orig_search
def unindex_webapps(ids, **kw):
    """Remove the given app ids from every active webapp index."""
    if not ids:
        return

    task_log.info('Un-indexing apps %s-%s. [%s]'
                  % (ids[0], ids[-1], len(ids)))

    index = kw.pop('index', WebappIndexer.get_index())
    # Note: If reindexing is currently occurring, `get_indices` will return
    # more than one index.
    indices = Reindexing.get_indices(index)
    es = WebappIndexer.get_es(urls=settings.ES_URLS)

    for app_id in ids:
        for target_index in indices:
            try:
                WebappIndexer.unindex(id_=app_id, es=es, index=target_index)
            except ElasticHttpNotFoundError:
                # Ignore if it's not there.
                task_log.info(
                    u'[Webapp:%s] Unindexing app but not found in index'
                    % app_id)
def test_q_num_requests_no_results(self):
    """A no-hit query issues exactly one ES search call."""
    es = WebappIndexer.get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        res = self.client.get(self.url, data={'q': 'noresults'})
        eq_(res.status_code, 200)
        eq_(res.json['meta']['total_count'], 0)
        eq_(len(res.json['objects']), 0)
        # Verify only one search call was made.
        eq_(es.counter, 1)
    finally:
        # Restore even on failure so the patch doesn't leak across tests.
        es.search = orig_search
def handle(self, *args, **kwargs):
    """Find apps missing from the ES index and index them."""
    index = WebappIndexer.get_index()
    doctype = WebappIndexer.get_mapping_type_name()
    es = WebappIndexer.get_es()

    app_ids = Webapp.objects.values_list('id', flat=True)
    missing_ids = []

    for app_id in app_ids:
        try:
            es.get(index, app_id, doctype, fields='id')
        except elasticsearch.NotFoundError:
            # App doesn't exist in our index, add it to `missing_ids`.
            missing_ids.append(app_id)

    # `sys.stdout.write` does not append a newline like `print` does, so
    # add one explicitly to keep command output readable.
    if missing_ids:
        sys.stdout.write('Adding %s doc(s) to the index.\n'
                         % len(missing_ids))
        WebappIndexer().run_indexing(missing_ids, es)
    else:
        sys.stdout.write('No docs missing from index.\n')
def get(self, request, *args, **kwargs):
    """Return app-name completion suggestions as a JSON list.

    Query params:
        q: (partial) search text for the completion suggester.
        limit: maximum number of suggestions to return (default 5).
    """
    # Coerce `limit` to int: GET values are strings and ES's `size`
    # option expects an integer. Fall back to the default on bad input.
    try:
        limit = int(request.GET.get('limit', 5))
    except (TypeError, ValueError):
        limit = 5
    es_query = {
        'apps': {
            'completion': {'field': 'name_suggest', 'size': limit},
            'text': request.GET.get('q', '').strip()
        }
    }
    results = WebappIndexer.get_es().suggest(
        body=es_query, index=WebappIndexer.get_index())
    if 'apps' in results:
        data = results['apps'][0]['options']
    else:
        data = []
    serializer = self.get_serializer(data)
    # This returns a JSON list. Usually this is a bad idea for security
    # reasons, but we don't include any user-specific data, it's fully
    # anonymous, so we're fine.
    return HttpResponse(json.dumps(serializer.data),
                        content_type='application/x-rocketbar+json')
def test_q_num_requests(self):
    """A matching query issues exactly one ES search call."""
    es = WebappIndexer.get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        res = self.anon.get(self.url, data={'q': 'something'})
        eq_(res.status_code, 200)
        eq_(res.json['meta']['total_count'], 1)
        eq_(len(res.json['objects']), 1)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        # Verify only one search call was made.
        eq_(es.counter, 1)
    finally:
        # Restore even on failure so the patch doesn't leak across tests.
        es.search = orig_search
def test_q_num_requests(self):
    """A matching query issues exactly one ES search call."""
    es = WebappIndexer.get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        res = self.client.get(self.url, data={"q": "something"})
        eq_(res.status_code, 200)
        eq_(res.json["meta"]["total_count"], 1)
        eq_(len(res.json["objects"]), 1)
        obj = res.json["objects"][0]
        eq_(obj["slug"], self.webapp.app_slug)
        # Verify only one search call was made.
        eq_(es.counter, 1)
    finally:
        # Restore even on failure so the patch doesn't leak across tests.
        es.search = orig_search
def search_webapps_and_homescreens():
    """Return a Search spanning both the homescreen and webapp indexes."""
    es = WebappIndexer.get_es()
    search = Search(
        using=es,
        index=[settings.ES_INDEXES["homescreen"],
               settings.ES_INDEXES["webapp"]],
        doc_type=["homescreen", "webapp"],
    )
    # Strip fields that must never leave the server.
    return search.extra(_source={"exclude": WebappIndexer.hidden_fields})
def search_webapps_and_homescreens():
    """Search across homescreens and webapps, hiding internal fields."""
    doc_types = ['homescreen', 'webapp']
    index_names = [settings.ES_INDEXES[name] for name in doc_types]
    base = Search(using=WebappIndexer.get_es(), index=index_names,
                  doc_type=doc_types)
    return base.extra(_source={'exclude': WebappIndexer.hidden_fields})