def get_updates_queue(self):
    """Queue of already-published apps that have a new, unreviewed version.

    Uses Elasticsearch when ``self.use_es`` is set; otherwise falls back
    to an ORM query over Versions.
    """
    if self.use_es:
        # Every condition must hold for an app to land in this queue.
        conditions = [
            es_filter.Terms(status=mkt.WEBAPPS_APPROVED_STATUSES),
            es_filter.Term(**{'latest_version.status': mkt.STATUS_PENDING}),
            es_filter.Terms(app_type=[mkt.ADDON_WEBAPP_PACKAGED,
                                      mkt.ADDON_WEBAPP_PRIVILEGED]),
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=False),
        ]
        return WebappIndexer.search().filter('bool', must=conditions)

    # ORM fallback. Note: this will work as long as we disable files of
    # existing unreviewed versions when a new version is uploaded.
    qs = Version.objects.filter(
        files__status=mkt.STATUS_PENDING,
        addon__disabled_by_user=False,
        addon__is_packaged=True,
        addon__status__in=mkt.WEBAPPS_APPROVED_STATUSES)
    qs = qs.exclude(addon__id__in=self.excluded_ids)
    qs = qs.exclude(addon__tags__tag_text='homescreen')
    return (qs.order_by('nomination', 'created')
              .select_related('addon', 'files')
              .no_transforms())
def get_updates_queue(self):
    """Queue of already-published apps that have a new, unreviewed version.

    Uses Elasticsearch when ``self.use_es`` is set; otherwise falls back
    to an uncached ORM query over Versions.
    """
    if self.use_es:
        # Every condition must hold for an app to land in this queue.
        conditions = [
            es_filter.Terms(status=amo.WEBAPPS_APPROVED_STATUSES),
            es_filter.Term(**{'latest_version.status': amo.STATUS_PENDING}),
            es_filter.Terms(app_type=[amo.ADDON_WEBAPP_PACKAGED,
                                      amo.ADDON_WEBAPP_PRIVILEGED]),
            es_filter.Term(is_disabled=False),
            es_filter.Term(is_escalated=False),
        ]
        return WebappIndexer.search().filter('bool', must=conditions)

    # ORM fallback. Note: this will work as long as we disable files of
    # existing unreviewed versions when a new version is uploaded.
    qs = Version.objects.no_cache().filter(
        files__status=amo.STATUS_PENDING,
        addon__disabled_by_user=False,
        addon__is_packaged=True,
        addon__status__in=amo.WEBAPPS_APPROVED_STATUSES)
    qs = qs.exclude(addon__id__in=self.excluded_ids)
    return (qs.order_by('nomination', 'created')
              .select_related('addon', 'files')
              .no_transforms())
def get_queryset(self):
    """Build the filtered, published Contribution search for this request.

    Supported query params:
      - feature_types: repeated; matches ``content.feature_type.slug``.
      - contributors: repeated; matches ``contributor.username``.
      - staff: ``'freelance'`` selects freelance contributors; any other
        truthy value selects non-freelance (staff) contributors.
    """
    qs = Contribution.search_objects.search()

    feature_types = self.request.QUERY_PARAMS.getlist('feature_types')
    if feature_types:
        qs = qs.filter(es_filter.Terms(
            **{'content.feature_type.slug': feature_types}))

    contributors = self.request.QUERY_PARAMS.getlist('contributors')
    if contributors:
        qs = qs.filter(es_filter.Terms(
            **{'contributor.username': contributors}))

    # TODO: a 'tags' query param used to be read here but was never
    # applied to the search (the branch was a no-op `pass`). Implement a
    # tags filter or stop advertising the parameter.

    staff = self.request.QUERY_PARAMS.get('staff', None)
    if staff:
        # 'freelance' -> True, anything else truthy -> False.
        is_freelance = staff == 'freelance'
        qs = qs.filter(es_filter.Term(
            **{'contributor.is_freelance': is_freelance}))

    # Stable ordering so pagination is deterministic.
    qs = qs.sort('id')
    return ESPublishedFilterBackend().filter_queryset(
        self.request, qs, None)
def search(self):
    """Return a search using the combined query of all associated
    special coverage objects.

    Content matches when it satisfies at least one include condition
    (included ids, pinned ids, or any group filter) and is not in the
    excluded id list.
    """
    spec = self.query
    # OR-ed include conditions.
    include_any = [
        es_filter.Terms(pk=spec.get("included_ids", [])),
        es_filter.Terms(pk=spec.get("pinned_ids", [])),
    ]
    include_any += self.get_group_filters()
    # AND-ed top-level conditions: one include must match, and the
    # content must not be explicitly excluded.
    required = [
        es_filter.Bool(should=include_any),
        ~es_filter.Terms(pk=spec.get("excluded_ids", [])),
    ]
    return Content.search_objects.search().filter(
        es_filter.Bool(must=required))
def get_queryset(self):
    """Build the ReportContent search, narrowed by request query params."""
    params = self.request.QUERY_PARAMS
    qs = ReportContent.search_objects.search()

    # TODO: reintroduce forced submissions. The previous ORM
    # implementation merged the include/exclude id lists from
    # get_forced_payment_contributions(start_date, end_date) into a
    # Content queryset restricted to the published date range.

    if "feature_types" in params:
        qs = qs.filter(FeatureTypes(params.getlist("feature_types")))

    if "tags" in params:
        qs = qs.filter(Tags(params.getlist("tags")))

    # TODO: reintroduce the "staff" param ('freelance' / 'staff'),
    # which previously narrowed content by contributor freelance status
    # via Contribution.objects id lists.

    if "contributors" in params:
        contributors = params.getlist("contributors")
        qs = qs.filter(es_filter.Terms(
            **{'contributions.contributor.username': contributors}))

    return qs
def test_search_next_generator(es_client):
    """Iterating a search and calling six.next on it stay in lockstep,
    and next() drains a filtered search without raising early."""
    management.call_command("sync_es")
    mommy.make(SimpleObject, baz="tired", _quantity=10)
    mommy.make(SimpleObject, baz="awake", _quantity=10)
    SimpleObject.search_objects.refresh()

    search = SimpleObject.search_objects.search()
    for item in search:
        assert item
        # next() must yield the same object the for-loop just produced.
        assert item == six.next(search)

    search = SimpleObject.search_objects.search().filter(
        es_filter.Terms(**{"baz": ["tired", "awake"]}))
    search_results = [six.next(search) for _ in range(search.count())]
    assert len(search_results) == 20
def get_apps(self, request, app_ids):
    """
    Takes a list of app_ids. Gets the apps, including filters.
    Returns an app_map for serializer context.
    """
    filtering_enabled = request.QUERY_PARAMS.get('filtering', '1') != '0'
    if filtering_enabled:
        # Run the consolidated consumer-side app filter.
        sq = WebappIndexer.get_app_filter(
            request,
            {'device': self._get_device(request)},
            app_ids=app_ids)
    else:
        # No filtering: just fetch by id, capped at the requested count.
        sq = WebappIndexer.search().filter(es_filter.Bool(
            should=[es_filter.Terms(id=app_ids)]))[0:len(app_ids)]

    # Map id -> app so feed elements can be attached later.
    hits = sq.execute().hits
    return {app.id: app for app in hits}
def test_stop_iteration_exception(self):
    """Interleaving two querysets through SearchSlicer honors the
    validator and does not raise StopIteration past the end."""
    queryset1 = Content.search_objects.search(
        feature_types=[self.feature_type1.slug])
    queryset2 = Content.search_objects.search(
        feature_types=[self.feature_type2.slug])
    # Narrow queryset2 to a single known document.
    queryset2 = queryset2.filter(
        es_filter.Terms(**{"id": [queryset2[0].id]}))

    def even_validator(index):
        # queryset2 items may only occupy even slots.
        return index % 2 == 0

    reading_list = SearchSlicer()
    reading_list.register_queryset(queryset1)
    reading_list.register_queryset(queryset2, validator=even_validator)

    out = [obj for obj in reading_list]
    for obj in queryset1:
        self.assertIn(obj, out)
    for obj in queryset2:
        self.assertTrue(out.index(obj) % 2 == 0)
def get_app_filter(cls, request, additional_data=None, sq=None,
                   app_ids=None, no_filter=False):
    """
    THE grand, consolidated ES filter for Webapps.

    By default:
    - Excludes non-public apps.
    - Excludes disabled apps (whether by reviewer or by developer).
    - Excludes based on region exclusions.
    - TODO: Excludes based on device and platform support.

    additional_data -- an object with more data to allow more filtering.
    sq -- if you have an existing search object to filter off of.
    app_ids -- if you want to filter by a list of app IDs.
    no_filter -- doesn't apply the consumer-side excludes (public/region).

    Returns the (possibly new) search object with all filters applied.
    """
    from mkt.api.base import get_region_from_request
    from mkt.search.views import name_query

    # Fall back to a fresh search / empty filter data when not supplied.
    sq = sq or cls.search()
    additional_data = additional_data or {}
    app_ids = app_ids or []

    # Baseline filter values; callers override via additional_data.
    data = {
        'app_type': [],
        'author.raw': None,
        'category': None,  # Slug.
        'device': None,  # ID.
        'gaia': getattr(request, 'GAIA', False),
        'is_offline': None,
        'manifest_url': '',
        'mobile': getattr(request, 'MOBILE', False),
        'premium_type': [],
        'profile': get_feature_profile(request),
        'q': '',
        'region': getattr(get_region_from_request(request), 'id', None),
        'status': None,
        'supported_locales': [],
        'tablet': getattr(request, 'TABLET', False),
        'tags': '',
    }
    data.update(additional_data)

    # Fields that will be filtered with a term query.
    term_fields = ('author.raw', 'device', 'manifest_url', 'status',
                   'tags')
    # Fields that will be filtered with a terms query.
    terms_fields = ('category', 'premium_type', 'app_type',
                    'supported_locales')

    # QUERY.
    if data['q']:
        # Function score for popularity boosting (defaults to multiply).
        sq = sq.query(
            'function_score',
            query=name_query(data['q'].lower()),
            functions=[query.SF('field_value_factor', field='boost')])

    # MUST. Consumer-side baseline: public and not disabled — skipped
    # entirely when no_filter is set.
    must = [
        F('term', status=amo.STATUS_PUBLIC),
        F('term', is_disabled=False),
    ] if not no_filter else []

    for field in term_fields + terms_fields:
        # Term filters.
        if data[field]:
            filter_type = 'term' if field in term_fields else 'terms'
            must.append(F(filter_type, **{field: data[field]}))

    if not no_filter:
        if data['profile']:
            # Feature filters.
            profile = data['profile']
            for k, v in profile.to_kwargs(
                    prefix='features.has_').items():
                must.append(F('term', **{k: v}))
        if data['mobile'] or data['gaia']:
            # Uses flash.
            must.append(F('term', uses_flash=False))
        if data['is_offline'] is not None:
            must.append(F('term', is_offline=data['is_offline']))

    # SHOULD. When filtering by explicit app ids, also cap the result
    # window to the number of distinct ids requested.
    should = []
    if app_ids:
        should = [es_filter.Terms(id=list(set(app_ids)))]
        sq = sq[0:len(set(app_ids))]

    # FILTER.
    if must or should:
        sq = sq.filter(es_filter.Bool(must=must, should=should))

    if data['region'] and not no_filter:
        # Region exclusions.
        sq = sq.filter(~F('term', region_exclusions=data['region']))

    return sq