def _filter_request(self, request: request.Request, queryset: QuerySet, team: Team) -> QuerySet:
    if request.GET.get("id"):
        people = request.GET["id"].split(",")
        queryset = queryset.filter(id__in=people)
    if request.GET.get("search"):
        parts = request.GET["search"].split(" ")
        contains = []
        for part in parts:
            if ":" in part:
                queryset = queryset.filter(properties__has_key=part.split(":")[1])
            else:
                contains.append(part)
        queryset = queryset.filter(
            Q(properties__icontains=" ".join(contains))
            | Q(persondistinctid__distinct_id__icontains=" ".join(contains))
        ).distinct("id")
    if request.GET.get("cohort"):
        queryset = queryset.filter(cohort__id=request.GET["cohort"])
    if request.GET.get("properties"):
        queryset = queryset.filter(
            Filter(data={"properties": json.loads(request.GET["properties"])}).properties_to_Q(team_id=team.pk)
        )

    queryset_category_pass = None
    category = request.query_params.get("category")
    if category == "identified":
        queryset_category_pass = queryset.filter
    elif category == "anonymous":
        queryset_category_pass = queryset.exclude

    if queryset_category_pass is not None:
        queryset = queryset_category_pass(is_identified=True)

    queryset = queryset.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
    return queryset
def _filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet:
    filters = request.GET.dict()
    for key in filters:
        if key == "after":
            queryset = queryset.filter(created_at__gt=request.GET["after"])
        elif key == "before":
            queryset = queryset.filter(created_at__lt=request.GET["before"])
        elif key == "dashboardItemId":
            queryset = queryset.filter(dashboard_item_id=request.GET["dashboardItemId"])
        elif key == "scope":
            queryset = queryset.filter(scope=request.GET["scope"])
        elif key == "apply_all":
            queryset_method = (
                queryset.exclude if bool(strtobool(str(request.GET["apply_all"]))) else queryset.filter
            )
            queryset = queryset_method(scope="dashboard_item")
        elif key == "deleted":
            queryset = queryset.filter(deleted=bool(strtobool(str(request.GET["deleted"]))))
    return queryset
def _filter_request(self, request: request.Request, queryset: QuerySet, team: Team) -> QuerySet:
    if request.GET.get("id"):
        people = request.GET["id"].split(",")
        queryset = queryset.filter(id__in=people)
    if request.GET.get("search"):
        parts = request.GET["search"].split(" ")
        contains = []
        for part in parts:
            if ":" in part:
                queryset = queryset.filter(properties__has_key=part.split(":")[1])
            else:
                contains.append(part)
        queryset = queryset.filter(properties__icontains=" ".join(contains))
    if request.GET.get("cohort"):
        queryset = queryset.filter(cohort__id=request.GET["cohort"])
    if request.GET.get("properties"):
        queryset = queryset.filter(
            Filter(data={"properties": json.loads(request.GET["properties"])}).properties_to_Q(team_id=team.pk)
        )
    queryset = queryset.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
    return queryset
def filter_queryset_by_date(self, queryset: QuerySet) -> QuerySet:
    """Limit the size of the queryset by filtering by date if dates are available in the context of the request.

    Args:
        queryset: unfiltered queryset of the view

    Returns:
        queryset: same as the input but filtered by date
    """
    start_date = self.request.query_params.get('startDate', None)
    if start_date:
        start_date = self.validate_date(start_date, 'startDate')
    end_date = self.request.query_params.get('endDate', None)
    if end_date:
        end_date = self.validate_date(end_date, 'endDate')

    if start_date and not end_date:
        queryset = queryset.filter(dt__gte=start_date)
    elif not start_date and end_date:
        queryset = queryset.filter(dt__lte=end_date)
    elif start_date and end_date:
        self.validate_date_config(start_date, end_date)
        queryset = queryset.filter(dt__range=(start_date, end_date))
    return queryset
def test_validate_order_client_location(
    user: User,
    admin: User,
    user_locations: QuerySet,
    services: QuerySet,
):
    """Should validate orders' client locations."""
    service = services.exclude(professional__user=user).first()
    order = Order()
    order.client = user
    order.service = service
    order.service.service_type = Service.TYPE_CLIENT_LOCATION

    with pytest.raises(ValidationError) as error:
        validate_order_client_location(order)
    assert "client location is empty" in str(error)

    order.client_location = user_locations.filter(user=admin).first()
    with pytest.raises(ValidationError) as error:
        validate_order_client_location(order)
    assert "client location from the other user" in str(error)

    order.service.service_type = Service.TYPE_ONLINE
    validate_order_client_location(order)

    order.service.service_type = Service.TYPE_CLIENT_LOCATION
    order.client_location = user_locations.filter(user=user).first()
    validate_order_service_location(order)

    order.service = None  # type: ignore
    with pytest.raises(ValidationError) as error:
        validate_order_client_location(order)
    assert "The service, client, or client location is empty" in str(error)
def filter_by_semester(queryset: QuerySet, year: Optional[int], semester: Optional[int]) -> QuerySet:
    if year is not None:
        queryset = queryset.filter(year=year)
    if semester is not None:
        queryset = queryset.filter(semester=semester)
    return queryset
def _objects_within_period(
    self,
    query_objects: QuerySet,
    up_to: datetime.datetime,
    starting_from: Optional[datetime.datetime] = None,
) -> QuerySet:
    """Retrieve the reports within a specific period of time.

    The objects are filtered considering a specific period of time to
    allow for comparable results between subclasses. The lower bound
    should be omitted for the first update but always set for later
    calls. The upper bound must be specified to avoid race conditions.

    Args:
        query_objects: The reports to filter.
        up_to: The maximum timestamp to consider (inclusive).
        starting_from: The minimum timestamp to consider (exclusive).

    Returns:
        The reports received within a specific period of time.
    """
    # pylint: disable=no-self-use  # self might be used by subclasses.
    query_objects = query_objects.filter(created_at__lte=up_to)
    if starting_from:
        query_objects = query_objects.filter(created_at__gt=starting_from)
    return query_objects
def get_appointments_in_datetime_range(
    queryset: models.QuerySet,
    datetime_from: Optional[datetime.datetime] = None,
    datetime_to: Optional[datetime.datetime] = None,
    exclude_statuses: Optional[List[AppointmentStatus]] = None,
    **kwargs
) -> models.QuerySet:
    """
    Filter queryset of appointments based on given datetime range.

    :param queryset: Queryset to filter
    :param datetime_from: datetime at which first appointment is present
    :param datetime_to: datetime by which last appointment starts
    :param exclude_statuses: list of statuses to be excluded from the resulting query
    :param kwargs: any optional filter kwargs to be applied
    :return: Resulting Appointment queryset
    """
    if datetime_from is not None:
        queryset = queryset.filter(
            datetime_start_at__gt=datetime_from - models.F('stylist__service_time_gap'))
    if datetime_to is not None:
        queryset = queryset.filter(datetime_start_at__lt=datetime_to)
    if exclude_statuses:
        assert isinstance(exclude_statuses, list)
        queryset = queryset.exclude(status__in=exclude_statuses)
    return queryset.filter(**kwargs)
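# Hedged usage sketch for get_appointments_in_datetime_range above, not part of
# the original source: `Appointment`, `AppointmentStatus.CANCELLED_BY_CLIENT`,
# and the `stylist` filter kwarg are assumed to exist in the surrounding project.
def todays_appointments(stylist) -> models.QuerySet:
    now = datetime.datetime.now(datetime.timezone.utc)
    return get_appointments_in_datetime_range(
        Appointment.objects.all(),
        datetime_from=now,
        datetime_to=now + datetime.timedelta(days=1),
        exclude_statuses=[AppointmentStatus.CANCELLED_BY_CLIENT],
        stylist=stylist,  # forwarded via **kwargs as an extra filter
    )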
def run_default_filter(
    validity_range: ValidityRange,
    qs: QuerySet,
    for_date: Optional[date] = None,
    filter_term: bool = True,
    filter_deleted: bool = True,
) -> QuerySet:
    """Add a default filter in order to select the correct term."""
    term_id, schoolyear_id, school_id, version_id = (
        validity_range.import_ref_untis,
        validity_range.school_year_untis,
        validity_range.school_id_untis,
        validity_range.version_id_untis,
    )
    qs = run_using(qs).filter(
        school_id=school_id,
        schoolyear_id=schoolyear_id,
        version_id=version_id,
    )
    if filter_term:
        qs = qs.filter(term_id=term_id)
    if filter_deleted:
        qs = qs.filter(deleted=0)
    return qs
def key_or_null_filter(self, queryset: QuerySet, name: str, value: str) -> QuerySet:
    if self.request and self.request.user and self.request.user.is_authenticated:
        return queryset.filter(
            (models.Q(user=self.request.user) & models.Q(key=value)) | models.Q(key__isnull=True)
        )
    return queryset.filter(user__isnull=True)
def user_filter(self, queryset: QuerySet, name: str, value: str) -> QuerySet:
    if self.request and self.request.user and self.request.user.is_authenticated:
        return queryset.filter(user=self.request.user)
    return queryset.filter(user__isnull=True)
def _calculate_people(events: QuerySet, offset: int):
    events = events.values("person_id").distinct()
    if request.GET.get("breakdown_type") == "cohort" and request.GET.get("breakdown_value") != "all":
        events = events.filter(
            Exists(
                CohortPeople.objects.filter(
                    cohort_id=int(request.GET["breakdown_value"]),
                    person_id=OuterRef("person_id"),
                ).only("id")
            )
        )
    if request.GET.get("breakdown_type") == "person":
        events = events.filter(
            Exists(
                Person.objects.filter(
                    **{
                        "id": OuterRef("person_id"),
                        "properties__{}".format(request.GET["breakdown"]): request.GET["breakdown_value"],
                    }
                ).only("id")
            )
        )
    people = Person.objects.filter(
        team=team, id__in=[p["person_id"] for p in events[offset:offset + 100]]
    )
    people = people.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache"))
    return PersonSerializer(people, context={"request": request}, many=True).data
def user_story_overworked_pie_chart_data(queryset: QuerySet) -> List:
    queryset = queryset.exclude(status=UserStory.Status.BACKLOG)
    acceptable_estimation_count = queryset.filter(
        annotated_actual_effort__gte=F("planned_effort") * 0.9,
        annotated_actual_effort__lte=F("planned_effort") * 1.1,
    ).count()
    underestimated_count = queryset.filter(
        annotated_actual_effort__gt=F("planned_effort") * 1.1).count()
    overestimated_count = queryset.filter(
        annotated_actual_effort__lt=F("planned_effort") * 0.9).count()
    return [
        {
            # "Acceptable estimation (+-10%)"
            "name": _("Estimación aceptable (+-10%)"),
            "color": "#4CAF50",
            "y": acceptable_estimation_count,
        },
        {
            # "Overestimation (>10%)"
            "name": _("Sobreestimación (>10%)"),
            "color": "#FF9800",
            "y": overestimated_count,
        },
        {
            # "Underestimation (<10%)"
            "name": _("Subestimación (<10%)"),
            "color": "#F44336",
            "y": underestimated_count,
        },
    ]
def filter(search_str, qs: QuerySet, _or_lookup, _and_lookup=None, _not_lookup=None):
    search_list = re.split(r'\s*,\s*', search_str.strip().strip(","))
    _not_search_list = [i[1:].strip() for i in search_list if i.startswith('-')]
    _and_search_list = [i[1:].strip() for i in search_list if i.startswith('&')]
    _or_search_list = [i for i in search_list if i[0] not in ['-', '&']]

    if _or_search_list:
        query = reduce(operator.or_, (Q(**{_or_lookup: i}) for i in _or_search_list))
        qs = qs.filter(query)
    if _and_search_list:
        query = reduce(operator.and_, (Q(**{_and_lookup or _or_lookup: i}) for i in _and_search_list))
        qs = qs.filter(query)
    if _not_search_list:
        query = reduce(operator.or_, (Q(**{_not_lookup or _or_lookup: i}) for i in _not_search_list))
        qs = qs.exclude(query)
    return qs
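# Hedged usage sketch for the generic `filter` helper above, not part of the
# original source: `Book` and its `title` field are hypothetical. A search
# string such as "django, &orm, -legacy" OR-matches "django", additionally
# AND-matches "orm", and excludes "legacy", all against `title__icontains`.
def search_books(search_str: str) -> QuerySet:
    return filter(search_str, Book.objects.all(), _or_lookup='title__icontains')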
def filter(queryset: QuerySet, request: Request) -> QuerySet:
    queryset = Manager_Workers.filter_list(
        queryset=queryset,
        request=request,
        name_filter='workersSelected',
        name_field='id_worker',
    )

    states_block = set(request.query_params.getlist('statesBlock[]'))
    combine_with_and = json.loads(request.query_params.get('combineWithAnd', 'false'))

    if combine_with_and:
        block_soft = 'block_soft' in states_block
        queryset = queryset.filter(is_blocked_global=block_soft)

        block_project = 'block_project' in states_block
        if block_project:
            queryset = queryset.filter(worker_blocks_project__project=database_object_project)
        else:
            queryset = queryset.exclude(worker_blocks_project__project=database_object_project)

    return queryset
def filter_by_state(self, queryset: QuerySet, field_name, value):
    if value == Reservation.State.OPEN.value:
        return queryset.filter(state__in=Reservation.OPEN_RESERVATION_STATES)
    if value in Reservation.State.values:
        return queryset.filter(state=value)
    raise ValidationError(_('Invalid state filter.'), code='invalid_state')
def queryset(self, request: HttpRequest, queryset: QuerySet):
    val = self.value()
    if val == '1':
        return queryset.filter(setup__isnull=False)
    elif val == '0':
        return queryset.filter(setup__isnull=True)
    else:
        return queryset
def filter_images(self, images: QuerySet = None) -> QuerySet:
    if images is None:
        images = GroundTruthImage.objects.all()
    if self.specialized_organ:
        images = images.filter(plant_organ=self.specialized_organ)
    if self.specialized_background:
        images = images.filter(background_type=self.specialized_background)
    return images
def base_currency_latest_values_filter(self, queryset: QuerySet, name: str, value: str) -> QuerySet:
    queryset = queryset.filter(base_currency=value)
    latest = queryset.filter(base_currency=OuterRef('base_currency')).order_by('-value_date')
    return queryset.annotate(
        base_currency_latest=Subquery(latest.values('value_date')[:1])
    ).filter(value_date=models.F('base_currency_latest'))
def apply_filter_spec(records: QuerySet, filter_spec_str: str) -> QuerySet:
    """
    Filter `records` based on `filter_spec_str`.

    filter_spec_str looks something like '1-4n-9ge-11-24le'.
    Dash-separated tokens, with each token having a filter ID number
    and possibly a suffix indicating how to apply the filter.
    """
    filter_spec_item_strs = filter_spec_str.split('-')
    filter_spec_item_regex = re.compile(r'(\d+)([a-z]*)')

    for item_index, item_str in enumerate(filter_spec_item_strs):
        regex_match = filter_spec_item_regex.fullmatch(item_str)
        if not regex_match:
            raise ValueError(f"Could not parse filter spec: {filter_spec_str}")
        filter_id, type_suffix = regex_match.groups()
        filter = Filter.objects.get(id=filter_id)

        match type_suffix:
            case '':
                # No suffix; basic filter matching.
                if filter.usage_type == Filter.UsageTypes.CHOOSABLE.value:
                    # The record uses this filter.
                    records = records.filter(filters=filter_id)
                elif filter.usage_type == Filter.UsageTypes.IMPLIED.value:
                    # The record has a filter that implies this filter.
                    records = records.filter(
                        filters__in=filter.incoming_filter_implications.all())
            case 'n':
                # Negation.
                if filter.usage_type == Filter.UsageTypes.CHOOSABLE.value:
                    # The record has a filter in this group that doesn't
                    # match the specified filter.
                    records = records \
                        .filter(filters__filter_group=filter.filter_group) \
                        .exclude(filters=filter_id)
                elif filter.usage_type == Filter.UsageTypes.IMPLIED.value:
                    # The record has a filter in this group that doesn't
                    # imply the specified filter.
                    records = records \
                        .filter(filters__filter_group=filter.filter_group) \
                        .exclude(filters__in=filter.incoming_filter_implications.all())
            case 'le':
                # Less than or equal to, for numeric filters.
                records = records.filter(
                    filters__filter_group=filter.filter_group,
                    filters__numeric_value__lte=filter.numeric_value)
            case 'ge':
                # Greater than or equal to, for numeric filters.
                records = records.filter(
                    filters__filter_group=filter.filter_group,
                    filters__numeric_value__gte=filter.numeric_value)
            case _:
                raise ValueError(f"Unknown filter type suffix: {type_suffix}")

    return records
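# Hedged usage sketch for apply_filter_spec above, not part of the original
# source: `Record` is a hypothetical model with a `filters` many-to-many
# relation to `Filter`. A spec such as '1-4n-9ge' would keep records that
# match filter 1 (directly or via implication), have a filter in filter 4's
# group other than filter 4, and have a filter in filter 9's group whose
# numeric value is >= filter 9's value.
def records_matching_spec(filter_spec_str: str) -> QuerySet:
    return apply_filter_spec(Record.objects.all(), filter_spec_str)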
def execute_on(self, query: QuerySet):
    """
    Execute a rule on a set of player cubes

    :param QuerySet[PlayerCube] query: a list of player cubes to apply the rule on
    :return: QuerySet
    """
    if self.predicate is not None:
        query = query.filter(self.predicate.as_filters())
    return query.filter(self.as_filters())
def key_filter(self, queryset: QuerySet, name: str, value: str) -> QuerySet:
    """Filter on key if request.user is set and authenticated."""
    if self.request and self.request.user and self.request.user.is_authenticated:
        return queryset.filter(user=self.request.user, key=value)
    return queryset.filter(user__isnull=True)
def _fetch_by(self, summaries_to_retrigger: QuerySet, frameworks: List[str], repositories: List[str]) -> QuerySet:
    if frameworks:
        summaries_to_retrigger = summaries_to_retrigger.filter(framework__name__in=frameworks)
    if repositories:
        summaries_to_retrigger = summaries_to_retrigger.filter(repository__name__in=repositories)
    return summaries_to_retrigger
def _projects_filter_for_non_privileged_users(
    user: User, queryset: QuerySet, project_relation: str, action: str = 'create'
):
    if not user.is_anonymous and (user.is_admin or user.is_superuser):
        return queryset.distinct()

    # Construct the public projects filter field lookup.
    project_filter = project_relation + 'is_public'

    # Filter the object list into two querysets;
    # one where the related Projects are public and one where they are private.
    public_objects = queryset.filter(**{project_filter: True}).distinct()
    private_objects = queryset.filter(**{project_filter: False}).distinct()

    # In case of an anonymous user, only return the public objects.
    if user.is_anonymous:
        unpublished_exclude = project_relation + 'publishingstatus__status'
        queryset = public_objects.exclude(
            **{unpublished_exclude: PublishingStatus.STATUS_UNPUBLISHED}
        ).distinct()

    # Otherwise, check to which objects the user has (change) permission.
    elif private_objects.exists():
        include_user_owned = hasattr(queryset.model, 'user')

        if action == 'list':
            # The view permission is new, and previously only the change
            # permission existed. To avoid adding new view permissions for
            # all the objects, we also check if a user has change
            # permissions, which implicitly implies view permissions.
            change_permission = type(private_objects[0])._meta.db_table.replace('_', '.change_')
            change_filter = user.get_permission_filter(
                change_permission, project_relation, include_user_owned
            )
            change_objects = private_objects.filter(change_filter).distinct()

            # Check if the user has view permission on the queryset.
            view_permission = change_permission.replace('.change_', '.view_')
            view_filter = user.get_permission_filter(
                view_permission, project_relation, include_user_owned
            )
            view_objects = private_objects.filter(view_filter).distinct()

            private_objects = (change_objects | view_objects).distinct()
        else:
            permission = type(private_objects[0])._meta.db_table.replace('_', '.change_')
            filter_ = user.get_permission_filter(permission, project_relation, include_user_owned)
            private_objects = private_objects.filter(filter_).distinct()

        queryset = public_objects | private_objects

    return queryset.distinct()
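# Hedged usage sketch for _projects_filter_for_non_privileged_users above, not
# part of the original source: `Result` is a hypothetical model reachable from
# Project via the 'project__' lookup prefix passed as `project_relation`.
def results_visible_to(user: User) -> QuerySet:
    return _projects_filter_for_non_privileged_users(
        user, Result.objects.all(), project_relation='project__', action='list')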
def _fields_queryset(self, queryset: QuerySet, params: QueryDict, schema_id: int):
    # Query based only on the text fields
    text_query = params.get('query', '')
    if text_query:
        try:
            schema: ItemSchema = ItemSchema.objects.get(pk=int(schema_id))
        except (ObjectDoesNotExist, ValueError):
            raise NotFound(f'ItemSchema with pk={schema_id} does not exist.')
        fields = schema.query_fields
        if len(fields):
            fields_query = Q(text_values__field_id=fields[0])
            for index in range(1, len(fields)):
                fields_query = fields_query | Q(text_values__field_id=fields[index])
            queryset = queryset.filter(
                fields_query, text_values__value__icontains=text_query
            )
        else:
            queryset = queryset.filter(text_values__value__icontains=text_query)
    # Query based on any field
    else:
        for param in params:
            query = self._parse_query(param)
            if query is not None:
                field_id, query_data = query
                if query_data['multi']:
                    value = params.getlist(param)
                else:
                    value = params.get(param)
                value = query_data['parser'].to_internal_value(value)
                if isinstance(field_id, int):
                    queryset = queryset.filter(Q(**{
                        query_data['field']: field_id,
                        query_data['value']: value,
                    }))
                if isinstance(field_id, list):
                    continue
    return queryset
def __filter_category(query: QuerySet, category: CategoryIdentifier) -> QuerySet:
    """Helper method to filter by category."""
    if isinstance(category, str):
        # search by name
        query = query.filter(categories__name=category)
    elif isinstance(category, int):
        # search by id
        query = query.filter(categories__id=category)
    elif isinstance(category, Category):
        # search by object
        query = query.filter(categories=category)
    return query
def _filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet:
    for key, _ in request.GET.items():
        if key == "after":
            queryset = queryset.filter(created_at__gt=request.GET["after"])
        elif key == "before":
            queryset = queryset.filter(created_at__lt=request.GET["before"])
        elif key == "dashboardItemId":
            queryset = queryset.filter(dashboard_item_id=request.GET["dashboardItemId"])
    return queryset
def filter_queryset(self, request: Request, queryset: QuerySet, view: View) -> QuerySet:
    if request.user.role == RoleEnum.USER:
        return queryset.filter(id=request.user.id)
    elif request.user.role == RoleEnum.MANAGER:
        return queryset.filter(Q(role=int(RoleEnum.USER)) | Q(role=int(RoleEnum.MANAGER)))
    elif request.user.role == RoleEnum.ADMIN:
        return queryset
    else:
        self._logger.error(f'Unknown role {request.user.role}')
        return queryset.none()
def set_price_range(self, value: str, qs: QuerySet):
    try:
        value = tuple(map(Decimal, value.split(',')))
        if len(value) > 2:
            raise ValueError()
    except (InvalidOperation, ValueError):
        raise ParseError('price should be [int[,int]]')
    else:
        if len(value) == 2:
            return qs.filter(current_price__range=value)
        return qs.filter(current_price__gte=value[0])
def get_actions(queryset: QuerySet, params: dict, team_id: int) -> QuerySet:
    if params.get(TREND_FILTER_TYPE_ACTIONS):
        queryset = queryset.filter(
            pk__in=[action.id for action in Filter({"actions": json.loads(params.get("actions", "[]"))}).actions]
        )
    if params.get("include_count"):
        queryset = queryset.annotate(count=Count(TREND_FILTER_TYPE_EVENTS))
    queryset = queryset.prefetch_related(Prefetch("steps", queryset=ActionStep.objects.order_by("id")))
    return queryset.filter(team_id=team_id).order_by("-id")
def queryset_iterator(queryset: QuerySet, chunksize: int = 5000) -> Iterator[Any]:
    queryset = queryset.order_by('id')
    while queryset.exists():
        for row in queryset[:chunksize]:
            msg_id = row.id
            yield row
        queryset = queryset.filter(id__gt=msg_id)
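# Hedged usage sketch for queryset_iterator above, not part of the original
# source: `Message` is a hypothetical model with an integer `id` primary key,
# and `archive` stands in for arbitrary per-row work. Rows are fetched in
# id-ordered chunks so a large table is never loaded into memory at once.
def archive_all_messages() -> None:
    for message in queryset_iterator(Message.objects.all(), chunksize=1000):
        archive(message)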
def compute_activity(self, user_activity_objects: QuerySet) -> None:
    # Report data from the past week.
    #
    # This is a rough report of client activity because we inconsistently
    # register activity from various clients; think of it as telling you
    # approximately how many people from a group have used a particular
    # client recently. For example, this might be useful to get a sense of
    # how popular different versions of a desktop client are.
    #
    # Importantly, this does NOT tell you anything about the relative
    # volumes of requests from clients.
    threshold = timezone_now() - datetime.timedelta(days=7)
    client_counts = user_activity_objects.filter(
        last_visit__gt=threshold
    ).values("client__name").annotate(count=Count('client__name'))

    total = 0
    counts = []
    for client_type in client_counts:
        count = client_type["count"]
        client = client_type["client__name"]
        total += count
        counts.append((count, client))

    counts.sort()

    for count in counts:
        print("%25s %15d" % (count[1], count[0]))
    print("Total:", total)
def filter_queryset(self, qs: QuerySet) -> QuerySet:
    return qs.filter(
        min_years_experience__gte=self.min_years_experience,
        min_years_experience__lte=self.max_years_experience,
        education_level=self.education_level,
    )
def filter_queryset(self, qs: QuerySet) -> QuerySet:
    return qs.filter(
        min_years_experience__gte=self.min_years_experience,
        education_level__in=self.get_valid_education_levels(),
    )
def gather_hot_conversations(user_profile: UserProfile, stream_messages: QuerySet) -> List[Dict[str, Any]]:
    # Gather stream conversations of 2 types:
    # 1. long conversations
    # 2. conversations where many different people participated
    #
    # Returns a list of dictionaries containing the templating
    # information for each hot conversation.
    conversation_length = defaultdict(int)  # type: Dict[Tuple[int, Text], int]
    conversation_diversity = defaultdict(set)  # type: Dict[Tuple[int, Text], Set[Text]]
    for user_message in stream_messages:
        if not user_message.message.sent_by_human():
            # Don't include automated messages in the count.
            continue

        key = (user_message.message.recipient.type_id,
               user_message.message.subject)
        conversation_diversity[key].add(user_message.message.sender.full_name)
        conversation_length[key] += 1

    diversity_list = list(conversation_diversity.items())
    diversity_list.sort(key=lambda entry: len(entry[1]), reverse=True)

    length_list = list(conversation_length.items())
    length_list.sort(key=lambda entry: entry[1], reverse=True)

    # Get up to the 4 best conversations from the diversity list
    # and length list, filtering out overlapping conversations.
    hot_conversations = [elt[0] for elt in diversity_list[:2]]
    for candidate, _ in length_list:
        if candidate not in hot_conversations:
            hot_conversations.append(candidate)
        if len(hot_conversations) >= 4:
            break

    # There was so much overlap between the diversity and length lists that we
    # still have < 4 conversations. Try to use remaining diversity items to pad
    # out the hot conversations.
    num_convos = len(hot_conversations)
    if num_convos < 4:
        hot_conversations.extend([elt[0] for elt in diversity_list[num_convos:4]])

    hot_conversation_render_payloads = []
    for h in hot_conversations:
        stream_id, subject = h
        users = list(conversation_diversity[h])
        count = conversation_length[h]

        # We'll display up to 2 messages from the conversation.
        first_few_messages = [user_message.message for user_message in
                              stream_messages.filter(
                                  message__recipient__type_id=stream_id,
                                  message__subject=subject)[:2]]

        teaser_data = {"participants": users,
                       "count": count - len(first_few_messages),
                       "first_few_messages": build_message_list(
                           user_profile, first_few_messages)}

        hot_conversation_render_payloads.append(teaser_data)
    return hot_conversation_render_payloads