def setUp(self):
    """Build ES-shaped documents for a factory-made feed and assemble the
    serializer context (app_map, feed_element_map, request)."""
    self.feed = self.feed_factory()
    self.data_es = [
        feed_item.get_indexer().extract_document(None, obj=feed_item)
        for feed_item in self.feed]
    # Denormalize feed elements into the serializer context.
    self.app_map = {}
    self.feed_element_map = defaultdict(dict)
    for i, feed_item in enumerate(self.data_es):
        # item_type names the attribute on the FeedItem holding the element.
        feed_element = getattr(self.feed[i], feed_item['item_type'])
        self.feed_element_map[feed_item['item_type']][feed_element.id] = (
            feed_element.get_indexer().extract_document(
                None, obj=feed_element))
        # Denormalize apps into serializer context.
        if hasattr(feed_element, 'apps'):
            # Multi-app elements (e.g. collections/brands).
            for app in feed_element.apps():
                self.app_map[app.id] = WebappIndexer.extract_document(
                    None, obj=app)
        else:
            # Single-app elements expose app_id directly.
            self.app_map[feed_element.app_id] = (
                WebappIndexer.extract_document(feed_element.app_id))
    self.context = {
        'app_map': self.app_map,
        'feed_element_map': self.feed_element_map,
        'request': mkt.site.tests.req_factory_factory('')
    }
def get(self, request, *args, **kwargs):
    """Return app-name completion suggestions for the rocketbar.

    Query params:
        q -- the (partial) app name to complete.
        limit -- max number of suggestions (default 5).
    """
    # request.GET values are strings; the ES completion `size` option
    # expects an int. Fall back to the default on a malformed value
    # instead of sending a bad query to ES.
    try:
        limit = int(request.GET.get('limit', 5))
    except (TypeError, ValueError):
        limit = 5
    es_query = {
        'apps': {
            'completion': {'field': 'name_suggest', 'size': limit},
            'text': request.GET.get('q', '').strip()
        }
    }
    results = WebappIndexer.get_es().suggest(
        body=es_query, index=WebappIndexer.get_index())
    if 'apps' in results:
        data = results['apps'][0]['options']
    else:
        data = []
    serializer = self.get_serializer(data)
    # This returns a JSON list. Usually this is a bad idea for security
    # reasons, but we don't include any user-specific data, it's fully
    # anonymous, so we're fine.
    return HttpResponse(json.dumps(serializer.data),
                        content_type='application/x-rocketbar+json')
def setUp(self):
    """Build ES-shaped documents for a factory-made feed and assemble the
    serializer context (app_map, feed_element_map, request)."""
    self.feed = self.feed_factory()
    self.data_es = [
        feed_item.get_indexer().extract_document(None, obj=feed_item)
        for feed_item in self.feed
    ]
    # Denormalize feed elements into the serializer context.
    self.app_map = {}
    self.feed_element_map = defaultdict(dict)
    for i, feed_item in enumerate(self.data_es):
        # item_type names the attribute on the FeedItem holding the element.
        feed_element = getattr(self.feed[i], feed_item['item_type'])
        self.feed_element_map[feed_item['item_type']][feed_element.id] = (
            feed_element.get_indexer().extract_document(
                None, obj=feed_element))
        # Denormalize apps into serializer context.
        if hasattr(feed_element, 'apps'):
            # Multi-app elements (e.g. collections/brands).
            for app in feed_element.apps():
                self.app_map[app.id] = WebappIndexer.extract_document(
                    None, obj=app)
        else:
            # Single-app elements expose app_id directly.
            self.app_map[feed_element.app_id] = (
                WebappIndexer.extract_document(feed_element.app_id))
    self.context = {
        'app_map': self.app_map,
        'feed_element_map': self.feed_element_map,
        'request': mkt.site.tests.req_factory_factory('')
    }
def tearDown(self):
    """Delete the test apps and scrub them from the ES index."""
    # Cleanup to remove these from the index.
    self.app1.delete()
    self.app2.delete()
    unindex_webapps([self.app1.id, self.app2.id])
    # Required to purge the suggestions data structure. In Lucene, a
    # document is not deleted from a segment, just marked as deleted.
    WebappIndexer.get_es().indices.optimize(index=WebappIndexer.get_index(),
                                            only_expunge_deletes=True)
def tearDown(self):
    """Delete the test apps and scrub their documents from the ES index."""
    for app in (self.app1, self.app2):
        app.delete()
    unindex_webapps([self.app1.id, self.app2.id])
    # Lucene only marks deleted documents inside a segment; expunging
    # deletes is required so the suggestions structure stops serving them.
    WebappIndexer.get_es().indices.optimize(
        index=WebappIndexer.get_index(), only_expunge_deletes=True)
def index_webapps(ids, **kw):
    # DEPRECATED: call WebappIndexer.index_ids directly.
    # Apps tagged 'homescreen' live in a separate index, so partition the
    # ids and send each set to its own indexer.
    homescreens = set(
        Webapp.tags.through.objects.filter(
            webapp_id__in=ids,
            tag__tag_text='homescreen').values_list('webapp_id', flat=True))
    webapps = set(ids) - homescreens
    if homescreens:
        HomescreenIndexer.index_ids(list(homescreens), no_delay=True)
    if webapps:
        WebappIndexer.index_ids(list(webapps), no_delay=True)
def index_webapps(ids, **kw):
    # DEPRECATED: call WebappIndexer.index_ids directly.
    # Apps tagged 'homescreen' are indexed separately from regular webapps,
    # so split the incoming ids into the two populations first.
    homescreen_qs = Webapp.tags.through.objects.filter(
        webapp_id__in=ids,
        tag__tag_text='homescreen').values_list('webapp_id', flat=True)
    homescreens = set(homescreen_qs)
    webapps = set(ids) - homescreens
    if homescreens:
        HomescreenIndexer.index_ids(list(homescreens), no_delay=True)
    if webapps:
        WebappIndexer.index_ids(list(webapps), no_delay=True)
def mget_apps(self, app_ids):
    """
    Takes a list of app_ids. Does an ES mget.
    Returns an app_map for serializer context.
    """
    app_map = {}
    es = WebappIndexer.get_es()
    apps = es.mget(body={'ids': app_ids}, index=WebappIndexer.get_index(),
                   doc_type=WebappIndexer.get_mapping_type_name())
    for app in apps['docs']:
        # Store the apps to attach to feed elements later.
        # Only _source carries the document body; key by its stored id.
        app = app['_source']
        app_map[app['id']] = app
    return app_map
def app_search(request):
    """Look up apps by pk (numeric q), GUID, or an ES name/slug search.

    Returns {'results': [...]} with dicts carrying id, url, app_slug, name.
    """
    results = []
    q = request.GET.get("q", u"").lower().strip()
    limit = lkp.MAX_RESULTS if request.GET.get("all_results") else lkp.SEARCH_LIMIT
    fields = ("name", "app_slug")
    non_es_fields = ["id", "name__localized_string"] + list(fields)
    if q.isnumeric():
        # Numeric query: treat it as a primary key.
        qs = Webapp.objects.filter(pk=q).values(*non_es_fields)[:limit]
    else:
        # Try to load by GUID:
        qs = Webapp.objects.filter(guid=q).values(*non_es_fields)[:limit]
        if not qs.count():
            qs = WebappIndexer.search().query(_expand_query(q, fields))[:limit]
            # TODO: Update to `.fields(...)` when the DSL supports it.
            qs = qs.execute()
    for app in qs:
        if isinstance(app, dict):
            # This is a result from the database.
            app["url"] = reverse("lookup.app_summary", args=[app["id"]])
            app["name"] = app["name__localized_string"]
            results.append(app)
        else:
            # This is a result from elasticsearch which returns `Result`
            # objects and name as a list, one for each locale.
            for name in app.name:
                results.append(
                    {
                        "id": app.id,
                        "url": reverse("lookup.app_summary", args=[app.id]),
                        "app_slug": app.get("app_slug"),
                        "name": name,
                    }
                )
    return {"results": results}
def index_webapps(ids, **kw):
    """TODO: use search/indexers.py:index."""
    # Guard against an empty id list: ids[0]/ids[-1] below would raise
    # IndexError. This mirrors the guard in unindex_webapps.
    if not ids:
        return
    task_log.info('Indexing apps %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
    index = kw.pop('index', WebappIndexer.get_index())
    # Note: If reindexing is currently occurring, `get_indices` will return
    # more than one index.
    indices = Reindexing.get_indices(index)
    es = WebappIndexer.get_es(urls=settings.ES_URLS)
    # Include deleted apps so their documents get refreshed too.
    qs = Webapp.indexing_transformer(Webapp.with_deleted.no_cache().filter(
        id__in=ids))
    for obj in qs:
        doc = WebappIndexer.extract_document(obj.id, obj)
        for idx in indices:
            WebappIndexer.index(doc, id_=obj.id, es=es, index=idx)
def test_mapping_properties(self):
    """Spot check a few of the key properties of the webapp mapping."""
    expected = ('id', 'app_slug', 'category', 'default_locale',
                'description', 'device', 'features', 'name', 'status')
    mapping = WebappIndexer.get_mapping()
    keys = mapping['webapp']['properties'].keys()
    for k in expected:
        ok_(k in keys, 'Key %s not found in mapping properties' % k)
def get_updates_queue(self):
    # Updated apps, i.e. apps that have been published but have new
    # unreviewed versions, go in this queue.
    if self.use_es:
        # ES path: approved apps with a pending latest version, packaged
        # or privileged, not disabled and not escalated.
        must = [
            es_filter.Terms(status=mkt.WEBAPPS_APPROVED_STATUSES),
            es_filter.Term(**{'latest_version.status': mkt.STATUS_PENDING}),
            es_filter.Terms(app_type=[mkt.ADDON_WEBAPP_PACKAGED,
                                      mkt.ADDON_WEBAPP_PRIVILEGED]),
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=False),
        ]
        return WebappIndexer.search().filter('bool', must=must)
    # Database fallback: equivalent filter on Version rows.
    return (Version.objects.filter(
        # Note: this will work as long as we disable files of existing
        # unreviewed versions when a new version is uploaded.
        files__status=mkt.STATUS_PENDING,
        addon__disabled_by_user=False,
        addon__is_packaged=True,
        addon__status__in=mkt.WEBAPPS_APPROVED_STATUSES)
        .exclude(addon__id__in=self.excluded_ids)
        .exclude(addon__tags__tag_text='homescreen')
        .order_by('nomination', 'created')
        .select_related('addon', 'files').no_transforms())
def field_to_native_es(self, obj, request):
    """
    A version of field_to_native that uses ElasticSearch to fetch the apps
    belonging to the collection instead of SQL.

    Relies on a FeaturedSearchView instance in self.context['view'] to
    properly rehydrate results returned by ES.
    """
    device = self._get_device(request)
    app_filters = {'profile': get_feature_profile(request)}
    if device and device != amo.DEVICE_DESKTOP:
        app_filters['device'] = device.id
    qs = WebappIndexer.get_app_filter(request, app_filters)
    qs = qs.filter('term', **{'collection.id': obj.pk})
    # Sort on the nested collection.order field, scoped to this collection
    # via the nested_filter so orders from other collections don't apply.
    qs = qs.sort({
        'collection.order': {
            'order': 'asc',
            'nested_filter': {
                'term': {
                    'collection.id': obj.pk
                }
            }
        }
    })
    return self.to_native(qs, use_es=True)
def get_apps(self, request, app_ids):
    """
    Takes a list of app_ids. Gets the apps, including filters.
    Returns an app_map for serializer context.
    """
    sq = WebappIndexer.search()
    if request.query_params.get('filtering', '1') == '1':
        # With filtering (default).
        for backend in self.filter_backends:
            sq = backend().filter_queryset(request, sq, self)
    sq = WebappIndexer.filter_by_apps(app_ids, sq)
    # Store the apps to attach to feed elements later.
    with statsd.timer('mkt.feed.views.apps_query'):
        apps = sq.execute().hits
    return dict((app.id, app) for app in apps)
def list(self, request, *args, **kwargs):
    """Return recommended apps for the authenticated user, falling back
    to a popularity search when recommendations are unavailable."""
    if (not settings.RECOMMENDATIONS_ENABLED or
            not settings.RECOMMENDATIONS_API_URL or
            not self.request.user.is_authenticated()):
        return self._popular()
    else:
        app_ids = []
        url = '{base_url}/api/v2/recommend/{limit}/{user_hash}/'.format(
            base_url=settings.RECOMMENDATIONS_API_URL,
            limit=20,
            user_hash=self.request.user.recommendation_hash)
        try:
            with statsd.timer('recommendation.get'):
                resp = requests.get(
                    url, timeout=settings.RECOMMENDATIONS_API_TIMEOUT)
            if resp.status_code == 200:
                app_ids = resp.json()['recommendations']
        except Timeout as e:
            log.warning(u'Recommendation timeout: {error}'.format(error=e))
        except RequestException as e:
            # On recommendation API exceptions we return popular.
            log.error(u'Recommendation exception: {error}'.format(error=e))
        if not app_ids:
            # Fall back to a popularity search.
            return self._popular()
        sq = WebappIndexer.get_app_filter(self.request, app_ids=app_ids)
        # NOTE(review): this returns a bare serialized list; a sibling
        # variant wraps results as {'objects': ...} — confirm which
        # envelope clients of this endpoint expect.
        return Response(
            self.serializer_class(sq.execute().hits, many=True, context={
                'request': self.request
            }).data)
def get_updates_queue(self):
    """Return the queue of approved apps with pending updates, via ES
    when enabled, otherwise via the database."""
    if self.use_es:
        # ES path: approved, pending latest version, packaged/privileged,
        # not disabled, not escalated.
        must = [
            es_filter.Terms(status=amo.WEBAPPS_APPROVED_STATUSES),
            es_filter.Term(
                **{'latest_version.status': amo.STATUS_PENDING}),
            es_filter.Terms(app_type=[
                amo.ADDON_WEBAPP_PACKAGED, amo.ADDON_WEBAPP_PRIVILEGED
            ]),
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=False),
        ]
        return WebappIndexer.search().filter('bool', must=must)
    # Database fallback: equivalent filter on Version rows.
    return (Version.objects.no_cache().filter(
        # Note: this will work as long as we disable files of existing
        # unreviewed versions when a new version is uploaded.
        files__status=amo.STATUS_PENDING,
        addon__disabled_by_user=False,
        addon__is_packaged=True,
        addon__status__in=amo.WEBAPPS_APPROVED_STATUSES).exclude(
        addon__id__in=self.excluded_ids).order_by(
        'nomination', 'created').select_related(
        'addon', 'files').no_transforms())
def list(self, request, *args, **kwargs):
    """Return recommended apps for the authenticated user, falling back
    to a popularity search when recommendations are unavailable."""
    if (not settings.RECOMMENDATIONS_ENABLED or
            not settings.RECOMMENDATIONS_API_URL or
            not self.request.user.is_authenticated()):
        return self._popular()
    else:
        app_ids = []
        url = '{base_url}/api/v2/recommend/{limit}/{user_hash}/'.format(
            base_url=settings.RECOMMENDATIONS_API_URL,
            limit=20,
            user_hash=self.request.user.recommendation_hash)
        try:
            with statsd.timer('recommendation.get'):
                resp = requests.get(
                    url, timeout=settings.RECOMMENDATIONS_API_TIMEOUT)
            if resp.status_code == 200:
                app_ids = resp.json()['recommendations']
        except Timeout as e:
            log.warning(u'Recommendation timeout: {error}'.format(error=e))
        except RequestException as e:
            # On recommendation API exceptions we return popular.
            log.error(u'Recommendation exception: {error}'.format(error=e))
        if not app_ids:
            # Fall back to a popularity search.
            return self._popular()
        sq = WebappIndexer.get_app_filter(self.request, app_ids=app_ids)
        return Response({
            'objects': self.serializer_class(
                sq.execute().hits, many=True,
                context={'request': self.request}).data})
def field_to_native_es(self, obj, request):
    """
    A version of field_to_native that uses ElasticSearch to fetch the apps
    belonging to the collection instead of SQL.

    Relies on a FeaturedSearchView instance in self.context['view'] to
    properly rehydrate results returned by ES.
    """
    device = self._get_device(request)
    app_filters = {'profile': get_feature_profile(request)}
    if device and device != amo.DEVICE_DESKTOP:
        app_filters['device'] = device.id
    qs = WebappIndexer.get_app_filter(request, app_filters)
    qs = qs.filter('term', **{'collection.id': obj.pk})
    # Sort on the nested collection.order field, scoped to this collection
    # via the nested_filter so orders from other collections don't apply.
    qs = qs.sort({
        'collection.order': {
            'order': 'asc',
            'nested_filter': {
                'term': {'collection.id': obj.pk}
            }
        }
    })
    return self.to_native(qs, use_es=True)
def get_updates_queue(self):
    # Updated apps, i.e. apps that have been published but have new
    # unreviewed versions, go in this queue.
    if self.use_es:
        # ES path: approved apps with a pending latest version, packaged
        # or privileged, not disabled and not escalated.
        must = [
            es_filter.Terms(status=mkt.WEBAPPS_APPROVED_STATUSES),
            es_filter.Term(
                **{'latest_version.status': mkt.STATUS_PENDING}),
            es_filter.Terms(app_type=[
                mkt.ADDON_WEBAPP_PACKAGED, mkt.ADDON_WEBAPP_PRIVILEGED
            ]),
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=False),
        ]
        return WebappIndexer.search().filter('bool', must=must)
    # Database fallback: equivalent filter on Version rows, excluding
    # homescreen-tagged apps.
    return (Version.objects.filter(
        # Note: this will work as long as we disable files of existing
        # unreviewed versions when a new version is uploaded.
        files__status=mkt.STATUS_PENDING,
        addon__disabled_by_user=False,
        addon__is_packaged=True,
        addon__status__in=mkt.WEBAPPS_APPROVED_STATUSES).exclude(
        addon__id__in=self.excluded_ids).exclude(
        addon__tags__tag_text='homescreen').order_by(
        'nomination', 'created').select_related(
        'addon', 'files').no_transforms())
def test_single_hit(self):
    """Test the ESPaginator only queries ES one time."""
    es = WebappIndexer.get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        ESPaginator(WebappIndexer.search(), 5).object_list.execute()
        eq_(es.counter, 1)
    finally:
        # Restore the patched method even if the assertion fails, so other
        # tests sharing this ES client are not left with the counter shim.
        es.search = orig_search
def get_apps(self, request, app_ids):
    """
    Takes a list of app_ids. Gets the apps, including filters.
    Returns an app_map for serializer context.
    """
    sq = WebappIndexer.search()
    if request.QUERY_PARAMS.get('filtering', '1') == '1':
        # With filtering (default).
        for backend in self.filter_backends:
            sq = backend().filter_queryset(request, sq, self)
    sq = WebappIndexer.filter_by_apps(app_ids, sq)
    # Store the apps to attach to feed elements later.
    with statsd.timer('mkt.feed.views.apps_query'):
        apps = sq.execute().hits
    return dict((app.id, app) for app in apps)
def app_search(request):
    """Look up apps by pk (numeric q), GUID, or an ES name/slug search.

    Returns {'results': [...]} with dicts carrying id, url, app_slug, name.
    """
    results = []
    q = request.GET.get('q', u'').lower().strip()
    limit = (lkp.MAX_RESULTS if request.GET.get('all_results')
             else lkp.SEARCH_LIMIT)
    fields = ('name', 'app_slug')
    non_es_fields = ['id', 'name__localized_string'] + list(fields)
    if q.isnumeric():
        # Numeric query: treat it as a primary key.
        qs = Webapp.objects.filter(pk=q).values(*non_es_fields)[:limit]
    else:
        # Try to load by GUID:
        qs = Webapp.objects.filter(guid=q).values(*non_es_fields)[:limit]
        if not qs.count():
            qs = (WebappIndexer.search()
                  .query(_expand_query(q, fields))[:limit])
            # TODO: Update to `.fields(...)` when the DSL supports it.
            qs = qs.execute()
    for app in qs:
        if isinstance(app, dict):
            # This is a result from the database.
            app['url'] = reverse('lookup.app_summary', args=[app['id']])
            app['name'] = app['name__localized_string']
            results.append(app)
        else:
            # This is a result from elasticsearch which returns `Result`
            # objects and name as a list, one for each locale.
            for name in app.name:
                results.append({
                    'id': app.id,
                    'url': reverse('lookup.app_summary', args=[app.id]),
                    'app_slug': app.get('app_slug'),
                    'name': name,
                })
    return {'results': results}
def app_search(request):
    """Look up apps by pk (numeric q), GUID, or an ES name/slug search.

    Returns {'results': [...]} with dicts carrying id, url, app_slug, name.
    """
    results = []
    q = request.GET.get('q', u'').lower().strip()
    limit = (lkp.MAX_RESULTS if request.GET.get('all_results')
             else lkp.SEARCH_LIMIT)
    fields = ('name', 'app_slug')
    non_es_fields = ['id', 'name__localized_string'] + list(fields)
    if q.isnumeric():
        # Numeric query: treat it as a primary key.
        qs = Webapp.objects.filter(pk=q).values(*non_es_fields)[:limit]
    else:
        # Try to load by GUID:
        qs = Webapp.objects.filter(guid=q).values(*non_es_fields)[:limit]
        if not qs.count():
            # TODO: Update to `.fields(...)` when the DSL supports it.
            qs = (WebappIndexer.search()
                  .query(_expand_query(q, fields))[:limit])
            qs = qs.execute()
    for app in qs:
        if isinstance(app, dict):
            # This is a result from the database.
            app['url'] = reverse('lookup.app_summary', args=[app['id']])
            app['name'] = app['name__localized_string']
            results.append(app)
        else:
            # This is a result from elasticsearch which returns `Result`
            # objects and name as a list, one for each locale.
            for name in app.name:
                results.append({
                    'id': app.id,
                    'url': reverse('lookup.app_summary', args=[app.id]),
                    'app_slug': app.get('app_slug'),
                    'name': name,
                })
    return {'results': results}
def test_no_filter(self):
    """Non-public apps are filtered out of get_app_filter results."""
    # Set a couple apps as non-public, the count should decrease.
    self.apps[0].update(status=amo.STATUS_REJECTED)
    self.apps[1].update(status=amo.STATUS_PENDING)
    self.refresh('webapp')
    sq = WebappIndexer.get_app_filter(self.request, app_ids=self.app_ids)
    results = sq.execute().hits
    # Presumably the fixture holds 11 apps, so 9 remain — see sibling
    # test_app_ids.
    eq_(len(results), 9)
def _filter(self, req, filters, **kwargs):
    """Run `filters` through the form and return the resulting ES query
    as a dict, or the form errors if validation fails."""
    # NOTE(review): the `req` parameter is ignored; `self.req` is used
    # below — confirm this is intentional.
    form = self.form_class(filters)
    if form.is_valid():
        qs = WebappIndexer.from_search(self.req, **kwargs)
        return _filter_search(
            self.req, qs, form.cleaned_data).to_dict()
    else:
        return form.errors.copy()
def test_app_ids(self):
    """
    Test all apps are returned if app IDs is passed.
    Natural ES limit is 10.
    """
    sq = WebappIndexer.filter_by_apps(app_ids=self.app_ids)
    results = sq.execute().hits
    # 11 exceeds the default ES page size of 10, proving the limit is
    # lifted when explicit app ids are given.
    eq_(len(results), 11)
def _filter(self, req=None, data=None):
    """Apply every configured filter class to a fresh webapp search and
    return the resulting ES query as a dict."""
    request = req or RequestFactory().get('/', data=data or {})
    request.user = AnonymousUser()
    sq = WebappIndexer.search()
    for filter_class in self.filter_classes:
        sq = filter_class().filter_queryset(request, sq, self.view_class)
    return sq.to_dict()
def test_mapping(self):
    """The mapping has a single 'webapp' doc type with _all disabled and
    a doc-values long `boost` property."""
    mapping = WebappIndexer.get_mapping()
    eq_(mapping.keys(), ['webapp'])
    webapp_mapping = mapping['webapp']
    eq_(webapp_mapping['_all'], {'enabled': False})
    expected_boost = {
        'type': 'long',
        'doc_values': True
    }
    eq_(webapp_mapping['properties']['boost'], expected_boost)
def search_webapps_and_homescreens():
    """Return a Search spanning both the homescreen and webapp indices,
    with the indexer's hidden fields excluded from _source."""
    return (Search(
        using=WebappIndexer.get_es(),
        index=[
            settings.ES_INDEXES['homescreen'], settings.ES_INDEXES['webapp']
        ],
        doc_type=['homescreen', 'webapp'
                  ]).extra(_source={'exclude': WebappIndexer.hidden_fields}))
def test_app_ids(self):
    """
    Test all apps are returned if app IDs is passed.
    Natural ES limit is 10.
    """
    sq = WebappIndexer.get_app_filter(self.request, app_ids=self.app_ids)
    results = sq.execute().hits
    # 11 exceeds the default ES page size of 10, proving the limit is
    # lifted when explicit app ids are given.
    eq_(len(results), 11)
def test_indexable(self):
    """Apps tagged 'homescreen' are excluded from webapp indexables."""
    homescreen = app_factory(name=u'Elegant Waffle',
                             description=u'homescreen runner',
                             created=self.days_ago(5),
                             manifest_url='http://h.testmanifest.com')
    Tag(tag_text='homescreen').save_tag(homescreen)
    homescreen.save()
    q = WebappIndexer.get_indexable()
    # Only the pre-existing fixture app should remain indexable.
    eq_(list(q), [self.app])
def _filter(self, req, filters, **kwargs):
    """Validate `filters` with the form, build the filtered + sorted ES
    query and return it as a dict, or return the form errors."""
    # NOTE(review): the `req` parameter is ignored; `self.req` is used
    # below — confirm this is intentional.
    form = self.form_class(filters)
    if form.is_valid():
        form_data = form.cleaned_data
        sq = WebappIndexer.get_app_filter(
            self.req, search_form_to_es_fields(form_data))
        return _sort_search(self.req, sq, form_data).to_dict()
    else:
        return form.errors.copy()
def setUp(self):
    """Create three apps and a brand referencing them, plus the ES
    documents the serializer context needs."""
    self.apps = [amo.tests.app_factory() for _ in range(3)]
    self.app_ids = [app.id for app in self.apps]
    self.brand = self.feed_brand_factory(app_ids=self.app_ids)
    self.data_es = self.brand.get_indexer().extract_document(
        None, obj=self.brand)
    self.app_map = {
        app.id: WebappIndexer.extract_document(app.id)
        for app in self.apps
    }
def search(self, request):
    """Build the app search for this view: optional status term filter,
    view filters, reviewer filters; return (paginated serializer, query)."""
    form_data = self.get_search_data(request)
    query = form_data.get('q', '')
    qs = WebappIndexer.search()
    if form_data.get('status') != 'any':
        # 'any' means no status restriction.
        qs = qs.filter('term', status=form_data.get('status'))
    qs = self.apply_filters(request, qs, data=form_data)
    qs = apply_reviewer_filters(request, qs, data=form_data)
    page = self.paginate_queryset(qs)
    return self.get_pagination_serializer(page), query
def setUp(self):
    """Index fixture app 337141 and build a SimpleESAppSerializer around
    its ES hit."""
    self.webapp = Webapp.objects.get(pk=337141)
    self.request = RequestFactory().get('/')
    self.request.user = AnonymousUser()
    # Run the region middleware so region-dependent attributes are set on
    # the request the way a real request cycle would.
    RegionMiddleware().process_request(self.request)
    self.reindex(Webapp)
    self.indexer = WebappIndexer.search().filter(
        'term', id=self.webapp.id).execute().hits[0]
    self.serializer = SimpleESAppSerializer(
        self.indexer, context={'request': self.request})
def search(self, request):
    """Validate the search form, apply status + reviewer filters to the
    ES query, and return (paginated serializer, query string)."""
    # Parse form.
    form = self.form_class(request.GET if request else None)
    if not form.is_valid():
        raise form_errors(form)
    form_data = form.cleaned_data
    # Status filter.
    data = search_form_to_es_fields(form_data)
    if form_data.get('status') != 'any':
        # 'any' means no status restriction.
        data.update(status=form_data.get('status'))
    # Do filter.
    sq = apply_reviewer_filters(request, WebappIndexer.search(),
                                data=form_data)
    sq = WebappIndexer.get_app_filter(request, data, sq=sq,
                                      no_filter=True)
    page = self.paginate_queryset(sq)
    return self.get_pagination_serializer(page), request.GET.get('q', '')
def get_apps(self, request, app_ids):
    """
    Takes a list of app_ids. Gets the apps, including filters.
    Returns an app_map for serializer context.
    """
    if request.QUERY_PARAMS.get('filtering', '1') == '0':
        # Without filtering. Fetch exactly the requested ids, sized to
        # the id list so the default ES page size doesn't truncate it.
        sq = WebappIndexer.search().filter(es_filter.Bool(
            should=[es_filter.Terms(id=app_ids)]
        ))[0:len(app_ids)]
    else:
        # With filtering.
        sq = WebappIndexer.get_app_filter(request, {
            'device': self._get_device(request)
        }, app_ids=app_ids)
    # Store the apps to attach to feed elements later.
    apps = sq.execute().hits
    return dict((app.id, app) for app in apps)
def setUp(self):
    """Index fixture app 337141 into the 'webapp' index and build a
    SimpleESAppSerializer around its ES hit."""
    self.webapp = Webapp.objects.get(pk=337141)
    self.request = RequestFactory().get('/')
    self.request.user = AnonymousUser()
    # Run the region middleware so region-dependent attributes are set on
    # the request the way a real request cycle would.
    RegionMiddleware().process_request(self.request)
    self.reindex(Webapp, 'webapp')
    self.indexer = WebappIndexer.search().filter(
        'term', id=self.webapp.id).execute().hits[0]
    self.serializer = SimpleESAppSerializer(self.indexer,
                                            context={'request': self.request})
def get_escalated_queue(self):
    """Return escalated, non-disabled apps, via ES or the database."""
    if not self.use_es:
        return EscalationQueue.objects.filter(
            addon__disabled_by_user=False)
    filters = [
        es_filter.Term(is_disabled=False),
        es_filter.Term(is_escalated=True),
    ]
    return WebappIndexer.search().filter('bool', must=filters)
def test_background_image(self):
    """An IMAGE-type feed app serializes a background_image URL with the
    image hash appended as a cache-buster."""
    self.feedapp.update(type=feed.FEEDAPP_IMAGE, image_hash='LOL')
    self.data_es = self.feedapp.get_indexer().extract_document(
        None, obj=self.feedapp)
    app_doc = WebappIndexer.extract_document(self.feedapp.app_id)
    self.app_map = {self.feedapp.app_id: app_doc}
    serialized = serializers.FeedAppESSerializer(
        self.data_es, context=self.context).data
    assert serialized['background_image'].endswith('image.png?LOL')
def _get_indices(self):
    """Map the `doc_type` query parameter (e.g. apps, sites) to the ES
    indices to search. Defaults to all content types."""
    doc_type = self.request.GET.get('doc_type', 'all')
    app_index = WebappIndexer.get_index()
    site_index = WebsiteIndexer.get_index()
    by_doc_type = {
        'webapp': [app_index],
        'website': [site_index],
    }
    return by_doc_type.get(doc_type, [app_index, site_index])
def get_escalated_queue(self):
    # Apps and homescreens flagged for escalation go in this queue.
    if self.use_es:
        must = [
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=True),
        ]
        return WebappIndexer.search().filter('bool', must=must)
    # Database fallback.
    return EscalationQueue.objects.filter(
        addon__disabled_by_user=False)
def _get_doc_types(self):
    """Map the `doc_type` query parameter (e.g. apps, sites) to the ES
    doc types to search. Defaults to all content types."""
    doc_type = self.request.GET.get('doc_type', 'all')
    app_doc = WebappIndexer.get_mapping_type_name()
    site_doc = WebsiteIndexer.get_mapping_type_name()
    by_doc_type = {
        'webapp': [app_doc],
        'website': [site_doc],
    }
    return by_doc_type.get(doc_type, [app_doc, site_doc])
def test_app_ids(self):
    """
    Test all apps are returned if app IDs is passed.
    Natural ES limit is 10.
    """
    app_ids = [amo.tests.app_factory().id for i in range(11)]
    self.refresh('webapp')
    sq = WebappIndexer.get_app_filter(amo.tests.req_factory_factory(),
                                      app_ids=app_ids)
    results = sq.execute().hits
    # 11 exceeds the default ES page size of 10, proving the limit is
    # lifted when explicit app ids are given.
    eq_(len(results), 11)
def unindex_webapps(ids, **kw):
    """Remove the given app ids from every active webapp ES index."""
    if not ids:
        return
    task_log.info('Un-indexing apps %s-%s. [%s]' %
                  (ids[0], ids[-1], len(ids)))
    index = kw.pop('index', WebappIndexer.get_index())
    # Note: If reindexing is currently occurring, `get_indices` will return
    # more than one index.
    indices = Reindexing.get_indices(index)
    es = WebappIndexer.get_es(urls=settings.ES_URLS)
    for id_ in ids:
        for idx in indices:
            try:
                WebappIndexer.unindex(id_=id_, es=es, index=idx)
            except ElasticHttpNotFoundError:
                # Ignore if it's not there.
                task_log.info(
                    u'[Webapp:%s] Unindexing app but not found in index' %
                    id_)
def test_background_image(self):
    """An IMAGE-type feed app serializes a background_image URL with the
    image hash appended as a cache-buster."""
    self.feedapp.update(type=feed.FEEDAPP_IMAGE, image_hash='LOL')
    self.data_es = self.feedapp.get_indexer().extract_document(
        None, obj=self.feedapp)
    self.app_map = {
        self.feedapp.app_id:
            WebappIndexer.extract_document(self.feedapp.app_id)
    }
    data = serializers.FeedAppESSerializer(self.data_es,
                                           context=self.context).data
    assert data['background_image'].endswith('image.png?LOL')
def setUp(self):
    """Create three apps and a shelf referencing them, plus the ES
    documents the serializer context needs."""
    self.apps = [amo.tests.app_factory() for _ in range(3)]
    self.app_ids = [app.id for app in self.apps]
    self.shelf = self.feed_shelf_factory(
        app_ids=self.app_ids, description={'de': 'test'},
        name={'en-US': 'test'})
    self.data_es = self.shelf.get_indexer().extract_document(
        None, obj=self.shelf)
    self.app_map = {
        app.id: WebappIndexer.extract_document(app.id)
        for app in self.apps
    }
def get_rereview_queue(self):
    """Return apps flagged for re-review (not escalated, not disabled),
    via ES when enabled, otherwise via the database."""
    if self.use_es:
        must = [
            es_filter.Term(is_rereviewed=True),
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=False),
        ]
        return WebappIndexer.search().filter('bool', must=must)
    # Database fallback.
    return (RereviewQueue.objects.no_cache().filter(
        addon__disabled_by_user=False).exclude(
        addon__in=self.excluded_ids))
def test_excluded_fields(self):
    """Fields listed in WebappIndexer.hidden_fields are stripped from
    search results."""
    ok_(WebappIndexer.hidden_fields)
    data = WebappIndexer.search().execute().hits
    eq_(len(data), 1)
    obj = data[0]
    # None of the hidden fields should appear on the hit.
    ok_('trending_2' not in obj)
    ok_('popularity_2' not in obj)
    # Name is exposed only through name_translations; all other name
    # variants are excluded.
    ok_('name_translations' in obj)
    ok_('name' not in obj)
    ok_('name_l10n_english' not in obj)
    ok_('name_sort' not in obj)
    ok_('name.raw' not in obj)