def event_list(request):
    """Return a JSON page of events the user may access, matching ``query``.

    The query is compared (case-insensitively) against event name/slug and
    organizer name/slug. Results are ordered by effective start date (the
    earliest subevent date, falling back to the event's own start), newest
    first, and paginated in pages of 20 via the ``page`` GET parameter.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    matches = (
        Q(name__icontains=i18ncomp(query))
        | Q(slug__icontains=query)
        | Q(organizer__name__icontains=i18ncomp(query))
        | Q(organizer__slug__icontains=query)
    )
    # Aggregate subevent dates so event series sort by their real date span.
    qs = request.user.get_events_with_any_permission(request).filter(matches).annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from')),
    ).annotate(
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')

    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    page_items = qs.select_related('organizer')[offset:offset + pagesize]
    return JsonResponse({
        'results': [serialize_event(e) for e in page_items],
        'pagination': {"more": total >= (offset + pagesize)},
    })
def variations_select2(request, **kwargs):
    """Select2 endpoint: search item variations of the current event.

    Every whitespace-separated word of ``query`` must match either the
    variation value or the item name (case-insensitive). Results are
    ordered by item position/name, then variation position/value, and
    paginated in pages of 20.

    Bug fix: the item-name lookup previously compared against the builtin
    function ``ord`` (``i18ncomp(ord)``) instead of the current search
    word, so the item-name half of the condition never matched anything
    sensible. It now uses ``word`` like the value lookup does.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    q = Q(item__event=request.event)
    for word in query.split():
        # Each word must match somewhere (AND across words, OR across fields).
        q &= Q(value__icontains=i18ncomp(word)) | Q(item__name__icontains=i18ncomp(word))
    qs = ItemVariation.objects.filter(q).order_by(
        'item__position', 'item__name', 'position', 'value'
    ).select_related('item')
    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    doc = {
        'results': [
            {
                'id': e.pk,
                'text': str(e.item) + " – " + str(e),
            }
            for e in qs[offset:offset + pagesize]
        ],
        'pagination': {
            "more": total >= (offset + pagesize)
        }
    }
    return JsonResponse(doc)
def event_list(request):
    """JSON list of events visible to the user, filtered by ``query``.

    Matches event name/slug and organizer name/slug, sorts by effective
    start date (earliest subevent date or the event's own start) descending,
    and returns 20 results per ``page``.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    pagesize = 20
    offset = (page - 1) * pagesize

    events = request.user.get_events_with_any_permission(request)
    events = events.filter(
        Q(name__icontains=i18ncomp(query)) |
        Q(slug__icontains=query) |
        Q(organizer__name__icontains=i18ncomp(query)) |
        Q(organizer__slug__icontains=query)
    )
    # Subevent date aggregates let series events sort by their actual span.
    events = events.annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from')),
    ).annotate(
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')

    total = events.count()
    return JsonResponse({
        'results': [
            serialize_event(ev)
            for ev in events.select_related('organizer')[offset:offset + pagesize]
        ],
        'pagination': {"more": total >= (offset + pagesize)},
    })
def event_list(request):
    """JSON page of events the user may access, with a readable date range.

    Searches event and organizer name/slug, orders by effective start date
    (earliest subevent date or the event's own start) descending, and
    serializes each event including a human-readable date range and its
    control-panel URL. Pages hold 20 results.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    qs = request.user.get_events_with_any_permission(request).filter(
        Q(name__icontains=i18ncomp(query)) |
        Q(slug__icontains=query) |
        Q(organizer__name__icontains=i18ncomp(query)) |
        Q(organizer__slug__icontains=query)
    ).annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from')),
    ).annotate(
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')

    def _serialize(ev):
        # For event series, show the span of all subevent dates instead of
        # the event's own (usually meaningless) date range.
        date_range = ev.get_date_range_display()
        if ev.has_subevents:
            if ev.min_from is None:
                date_range = pgettext('subevent', 'No dates')
            else:
                tz = pytz.timezone(ev.settings.timezone)
                date_range = _('Series:') + ' ' + daterange(
                    ev.min_from.astimezone(tz),
                    (ev.max_fromto or ev.max_to or ev.max_from).astimezone(tz)
                )
        return {
            'id': ev.pk,
            'slug': ev.slug,
            'organizer': str(ev.organizer.name),
            'name': str(ev.name),
            'text': str(ev.name),
            'date_range': date_range,
            'url': reverse('control:event.index', kwargs={
                'event': ev.slug,
                'organizer': ev.organizer.slug
            }),
        }

    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    return JsonResponse({
        'results': [
            _serialize(ev)
            for ev in qs.select_related('organizer')[offset:offset + pagesize]
        ],
        'pagination': {"more": total >= (offset + pagesize)},
    })
def filter_qs(self, qs):
    """Apply the cleaned form data (status, text query, ordering) to ``qs``."""
    data = self.cleaned_data
    status = data.get('status')
    if status == 'active':
        qs = qs.filter(active=True)
    elif status == 'running':
        # Active and currently inside the (possibly open-ended) presale window.
        presale_started = Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        presale_not_over = Q(presale_end__isnull=True) | Q(presale_end__gte=now())
        qs = qs.filter(active=True).filter(presale_started).filter(presale_not_over)
    elif status == 'inactive':
        qs = qs.filter(active=False)
    elif status == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif status == 'past':
        qs = qs.filter(presale_end__lte=now())

    search = data.get('query')
    if search:
        qs = qs.filter(
            Q(name__icontains=i18ncomp(search)) | Q(location__icontains=search)
        )
    if data.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    return qs
def filter_qs(self, qs):
    """Filter ``qs`` by status, weekday, text query and ordering from the form."""
    fdata = self.cleaned_data
    status = fdata.get('status')
    if status == 'active':
        qs = qs.filter(active=True)
    elif status == 'inactive':
        qs = qs.filter(active=False)
    elif status == 'running':
        qs = qs.filter(active=True).filter(
            Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        ).filter(
            Q(presale_end__isnull=True) | Q(presale_end__gte=now())
        )
    elif status == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif status == 'past':
        qs = qs.filter(presale_end__lte=now())

    weekday = fdata.get('weekday')
    if weekday:
        # Annotate the weekday of the start date and keep only matching rows.
        qs = qs.annotate(wday=ExtractWeekDay('date_from')).filter(wday=weekday)
    term = fdata.get('query')
    if term:
        qs = qs.filter(
            Q(name__icontains=i18ncomp(term)) | Q(location__icontains=term)
        )
    if fdata.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    return qs
def filter_qs(self, qs):
    """Apply status/weekday/query/date filters from the cleaned form data.

    Falls back to ordering by ``-date_from`` when no explicit ordering is
    requested.

    Bug fix: the 'past' status previously matched open-ended events
    (``date_to`` NULL) whose ``date_from`` lies in the *future*
    (``date_from__gte=now()``). Such events are upcoming, not past, so the
    comparison is flipped to ``date_from__lte=now()``.
    """
    fdata = self.cleaned_data
    if fdata.get('status') == 'active':
        qs = qs.filter(active=True)
    elif fdata.get('status') == 'running':
        qs = qs.filter(
            active=True
        ).filter(
            Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        ).filter(
            # No explicit presale end: fall back to the event dates to decide
            # whether it is still running.
            Q(Q(presale_end__isnull=True) & Q(
                Q(date_to__gte=now()) |
                Q(date_to__isnull=True, date_from__gte=now())
            )) |
            Q(presale_end__gte=now())
        )
    elif fdata.get('status') == 'inactive':
        qs = qs.filter(active=False)
    elif fdata.get('status') == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif fdata.get('status') == 'past':
        qs = qs.filter(
            Q(presale_end__lte=now()) |
            Q(
                Q(presale_end__isnull=True) & Q(
                    Q(date_to__lte=now()) |
                    # was date_from__gte=now() — that matched *future* events
                    Q(date_to__isnull=True, date_from__lte=now())
                )
            )
        )
    if fdata.get('weekday'):
        qs = qs.annotate(wday=ExtractWeekDay('date_from')).filter(wday=fdata.get('weekday'))
    if fdata.get('query'):
        query = fdata.get('query')
        qs = qs.filter(
            Q(name__icontains=i18ncomp(query)) | Q(location__icontains=query)
        )
    if fdata.get('date'):
        # Match events overlapping the selected calendar day in local time.
        date_start = make_aware(datetime.combine(
            fdata.get('date'), time(hour=0, minute=0, second=0, microsecond=0)
        ), get_current_timezone())
        date_end = make_aware(datetime.combine(
            fdata.get('date'), time(hour=23, minute=59, second=59, microsecond=999999)
        ), get_current_timezone())
        qs = qs.filter(
            Q(date_to__isnull=True, date_from__gte=date_start, date_from__lte=date_end) |
            Q(date_to__isnull=False, date_from__lte=date_end, date_to__gte=date_start)
        )
    if fdata.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    else:
        qs = qs.order_by('-date_from')
    return qs
def category_select2(request, **kwargs):
    """Select2 endpoint: product categories of the current event by name."""
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    pagesize = 20
    offset = (page - 1) * pagesize

    categories = request.event.categories.filter(
        name__icontains=i18ncomp(query)
    ).order_by('name')
    total = categories.count()
    results = [
        {'id': c.pk, 'text': str(c)}
        for c in categories[offset:offset + pagesize]
    ]
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def subevent_select2(request, **kwargs):
    """Select2 endpoint: subevents matching name or location, newest first."""
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    pagesize = 20
    offset = (page - 1) * pagesize

    matches = Q(name__icontains=i18ncomp(query)) | Q(location__icontains=query)
    subevents = request.event.subevents.filter(matches).order_by('-date_from')
    total = subevents.count()
    results = []
    for se in subevents[offset:offset + pagesize]:
        results.append({
            'id': se.pk,
            'name': str(se.name),
            'date_range': se.get_date_range_display(),
            'text': '{} – {}'.format(se.name, se.get_date_range_display()),
        })
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def nav_context_list(request):
    """Search data for the navigation context switcher.

    Combines the current user (when the query matches their e-mail or full
    name, or is empty), matching organizers and matching events into one
    result list. Without a query only a short preview (5 per category) is
    returned. When an ``organizer`` pk is supplied, that organizer is pinned
    right after the user entry.

    Bug fix: a stale or malformed ``organizer`` pk previously raised
    ``Organizer.DoesNotExist`` (or ``ValueError`` for non-numeric pks) and
    produced a 500 response; such pks are now silently ignored.
    """
    query = request.GET.get('query', '')
    organizer = request.GET.get('organizer', None)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    qs_events = request.user.get_events_with_any_permission(request).filter(
        Q(name__icontains=i18ncomp(query)) | Q(slug__icontains=query)
    ).annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from'))
    ).annotate(
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')
    if request.user.has_active_staff_session(request.session.session_key):
        qs_orga = Organizer.objects.all()
    else:
        qs_orga = Organizer.objects.filter(pk__in=request.user.teams.values_list('organizer', flat=True))
    if query:
        qs_orga = qs_orga.filter(Q(name__icontains=query) | Q(slug__icontains=query))

    show_user = not query or (
        query and request.user.email and query.lower() in request.user.email.lower()
    ) or (
        query and request.user.fullname and query.lower() in request.user.fullname.lower()
    )

    total = qs_events.count() + qs_orga.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    results = ([
        serialize_user(request.user)
    ] if show_user else []) + [
        serialize_orga(e) for e in qs_orga[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_event(e) for e in qs_events.select_related('organizer')[offset:offset + (pagesize if query else 5)]
    ]
    if show_user and organizer:
        try:
            pinned = serialize_orga(Organizer.objects.get(pk=organizer))
        except (Organizer.DoesNotExist, ValueError):
            # Stale or malformed pk in the request — just don't pin anything.
            pass
        else:
            if pinned in results:
                results.remove(pinned)
            results.insert(1, pinned)
    doc = {
        'results': results,
        'pagination': {
            "more": total >= (offset + pagesize)
        }
    }
    return JsonResponse(doc)
def filter_qs(self, qs):
    """Narrow the event queryset by status, organizer, text query and ordering."""
    data = self.cleaned_data
    status = data.get('status')
    if status == 'live':
        qs = qs.filter(live=True)
    elif status == 'running':
        # Live, presale started (or no start set) and not yet ended.
        started = Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        not_ended = Q(presale_end__isnull=True) | Q(presale_end__gte=now())
        qs = qs.filter(live=True).filter(started).filter(not_ended)
    elif status == 'notlive':
        qs = qs.filter(live=False)
    elif status == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif status == 'past':
        qs = qs.filter(presale_end__lte=now())
    elif status == 'date_future':
        # Non-series events whose end (or start, if open-ended) is ahead.
        qs = qs.filter(
            Q(has_subevents=False) & Q(
                Q(Q(date_to__isnull=True) & Q(date_from__gte=now())) |
                Q(Q(date_to__isnull=False) & Q(date_to__gte=now()))
            )
        )
    elif status == 'date_past':
        qs = qs.filter(
            Q(has_subevents=False) & Q(
                Q(Q(date_to__isnull=True) & Q(date_from__lt=now())) |
                Q(Q(date_to__isnull=False) & Q(date_to__lt=now()))
            )
        )
    elif status == 'series':
        qs = qs.filter(has_subevents=True)

    if data.get('organizer'):
        qs = qs.filter(organizer=data.get('organizer'))
    term = data.get('query')
    if term:
        qs = qs.filter(Q(name__icontains=i18ncomp(term)) | Q(slug__icontains=term))
    if data.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    return qs
def filter_qs(self, qs):
    """Filter the event queryset according to the submitted filter form."""
    fdata = self.cleaned_data
    status = fdata.get('status')
    if status == 'live':
        qs = qs.filter(live=True)
    elif status == 'notlive':
        qs = qs.filter(live=False)
    elif status == 'running':
        qs = qs.filter(live=True).filter(
            Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        ).filter(
            Q(presale_end__isnull=True) | Q(presale_end__gte=now())
        )
    elif status == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif status == 'past':
        qs = qs.filter(presale_end__lte=now())
    elif status == 'date_future':
        # Non-series events that have not ended yet (date_to, or date_from
        # when there is no end date, still lies in the future).
        qs = qs.filter(has_subevents=False).filter(
            Q(date_to__isnull=True, date_from__gte=now()) |
            Q(date_to__isnull=False, date_to__gte=now())
        )
    elif status == 'date_past':
        qs = qs.filter(has_subevents=False).filter(
            Q(date_to__isnull=True, date_from__lt=now()) |
            Q(date_to__isnull=False, date_to__lt=now())
        )
    elif status == 'series':
        qs = qs.filter(has_subevents=True)

    organizer = fdata.get('organizer')
    if organizer:
        qs = qs.filter(organizer=organizer)
    if fdata.get('query'):
        q = fdata.get('query')
        qs = qs.filter(Q(name__icontains=i18ncomp(q)) | Q(slug__icontains=q))
    if fdata.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    return qs
def subevent_select2(request, **kwargs):
    """Select2 endpoint: subevents by name/location, or by exact date.

    If the query parses as a date in one of the locale's configured input
    formats, subevents starting on that day (event-local timezone) are
    matched as well. The start time is appended to the date range when the
    subevent is configured to display times.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    condition = Q(name__icontains=i18ncomp(query)) | Q(location__icontains=query)
    tz = request.event.timezone

    parsed = None
    for fmt in get_format('DATE_INPUT_FORMATS'):
        try:
            parsed = datetime.strptime(query, fmt)
            break
        except (ValueError, TypeError):
            continue
    if parsed:
        day_start = make_aware(
            datetime.combine(parsed.date(), time(hour=0, minute=0, second=0)), tz)
        day_end = make_aware(
            datetime.combine(parsed.date(), time(hour=23, minute=59, second=59)), tz)
        condition |= Q(date_from__gte=day_start) & Q(date_from__lte=day_end)

    qs = request.event.subevents.filter(condition).order_by('-date_from', 'name', 'pk')
    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    results = []
    for se in qs[offset:offset + pagesize]:
        date_range = se.get_date_range_display()
        if se.settings.show_times:
            date_range += " " + date_format(se.date_from.astimezone(tz), "TIME_FORMAT")
        results.append({
            'id': se.pk,
            'name': str(se.name),
            'date_range': date_range,
            'text': str(se),
        })
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def nav_context_list(request):
    """Search results for the navigation context switcher.

    Returns the current user (when the query is empty or matches their
    e-mail or full name), organizers and events the user may access.
    Without a query, only a short preview of 5 organizers and 5 events is
    returned; with a query, full pages of 20.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    qs_events = request.user.get_events_with_any_permission(request).filter(
        Q(name__icontains=i18ncomp(query)) | Q(slug__icontains=query)
    ).annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from')),
    ).annotate(
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')

    # Staff sessions see every organizer, others only those of their teams.
    if request.user.has_active_staff_session(request.session.session_key):
        qs_orga = Organizer.objects.all()
    else:
        qs_orga = Organizer.objects.filter(
            pk__in=request.user.teams.values_list('organizer', flat=True)
        )
    if query:
        qs_orga = qs_orga.filter(Q(name__icontains=query) | Q(slug__icontains=query))

    lowered = query.lower()
    show_user = (
        not query
        or bool(request.user.email and lowered in request.user.email.lower())
        or bool(request.user.fullname and lowered in request.user.fullname.lower())
    )

    total = qs_events.count() + qs_orga.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    limit = pagesize if query else 5
    results = []
    if show_user:
        results.append(serialize_user(request.user))
    results.extend(serialize_orga(o) for o in qs_orga[offset:offset + limit])
    results.extend(
        serialize_event(ev)
        for ev in qs_events.select_related('organizer')[offset:offset + limit]
    )
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def filter_qs(self, qs):
    """Apply status, weekday, text and exact-date filters from the form data."""
    data = self.cleaned_data
    status = data.get('status')
    if status == 'active':
        qs = qs.filter(active=True)
    elif status == 'inactive':
        qs = qs.filter(active=False)
    elif status == 'running':
        qs = qs.filter(active=True).filter(
            Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        ).filter(
            Q(presale_end__isnull=True) | Q(presale_end__gte=now())
        )
    elif status == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif status == 'past':
        qs = qs.filter(presale_end__lte=now())

    if data.get('weekday'):
        # Compare the weekday of the start date against the selected one.
        qs = qs.annotate(wday=ExtractWeekDay('date_from')).filter(wday=data.get('weekday'))
    if data.get('query'):
        term = data.get('query')
        qs = qs.filter(Q(name__icontains=i18ncomp(term)) | Q(location__icontains=term))
    if data.get('date'):
        # Keep events that overlap the selected calendar day in local time.
        day = data.get('date')
        start_of_day = make_aware(
            datetime.combine(day, time(hour=0, minute=0, second=0, microsecond=0)),
            get_current_timezone())
        end_of_day = make_aware(
            datetime.combine(day, time(hour=23, minute=59, second=59, microsecond=999999)),
            get_current_timezone())
        qs = qs.filter(
            Q(date_to__isnull=True, date_from__gte=start_of_day, date_from__lte=end_of_day) |
            Q(date_to__isnull=False, date_from__lte=end_of_day, date_to__gte=start_of_day)
        )
    if data.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    return qs
def checkinlist_select2(request, **kwargs):
    """Select2 endpoint: check-in lists by name, or by subevent start date."""
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    condition = Q(name__icontains=i18ncomp(query))
    dt = None
    try:
        dt = parse(query)
    except ValueError:
        pass
    if dt and request.event.has_subevents:
        # A parseable date widens the search to lists whose subevent starts
        # on that day (event-local timezone).
        tz = request.event.timezone
        day_start = make_aware(
            datetime.combine(dt.date(), time(hour=0, minute=0, second=0)), tz)
        day_end = make_aware(
            datetime.combine(dt.date(), time(hour=23, minute=59, second=59)), tz)
        condition |= Q(subevent__date_from__gte=day_start) & Q(subevent__date_from__lte=day_end)

    qs = request.event.checkin_lists.select_related('subevent').filter(condition).order_by('name')
    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    results = [
        {
            'id': cl.pk,
            'text': str(cl.name),
            'event': str(cl.subevent) if request.event.has_subevents and cl.subevent else None,
        }
        for cl in qs[offset:offset + pagesize]
    ]
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def subevent_select2(request, **kwargs):
    """Select2 endpoint: subevents by name/location, or by a parseable date."""
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    tz = request.event.timezone
    cond = Q(name__icontains=i18ncomp(query)) | Q(location__icontains=query)

    dt = None
    try:
        dt = parse(query)
    except ValueError:
        pass
    if dt:
        # Also match subevents starting anywhere on the queried day.
        start = make_aware(datetime.combine(dt.date(), time(hour=0, minute=0, second=0)), tz)
        end = make_aware(datetime.combine(dt.date(), time(hour=23, minute=59, second=59)), tz)
        cond |= Q(date_from__gte=start) & Q(date_from__lte=end)

    qs = request.event.subevents.filter(cond).order_by('-date_from')
    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    results = [
        {
            'id': se.pk,
            'name': str(se.name),
            'date_range': se.get_date_range_display(),
            'text': '{} – {}'.format(se.name, se.get_date_range_display()),
        }
        for se in qs[offset:offset + pagesize]
    ]
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def subevent_select2(request, **kwargs):
    """Select2 endpoint: subevents by name/location or by an exact date.

    The query is additionally tried against the locale's configured date
    input formats; on success, subevents starting that day are included.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    tz = request.event.timezone
    cond = Q(name__icontains=i18ncomp(query)) | Q(location__icontains=query)

    parsed = None
    for fmt in get_format('DATE_INPUT_FORMATS'):
        try:
            parsed = datetime.strptime(query, fmt)
        except (ValueError, TypeError):
            continue
        else:
            break
    if parsed:
        day = parsed.date()
        start = make_aware(datetime.combine(day, time(hour=0, minute=0, second=0)), tz)
        end = make_aware(datetime.combine(day, time(hour=23, minute=59, second=59)), tz)
        cond |= Q(date_from__gte=start) & Q(date_from__lte=end)

    qs = request.event.subevents.filter(cond).order_by('-date_from')
    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    results = [
        {
            'id': se.pk,
            'name': str(se.name),
            'date_range': se.get_date_range_display(),
            'text': '{} – {}'.format(se.name, se.get_date_range_display()),
        }
        for se in qs[offset:offset + pagesize]
    ]
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def quotas_select2(request, **kwargs):
    """Select2 endpoint: quotas by name, subevent name, or subevent date."""
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    tz = request.event.timezone
    cond = Q(name__icontains=query) | Q(subevent__name__icontains=i18ncomp(query))

    parsed = None
    for fmt in get_format('DATE_INPUT_FORMATS'):
        try:
            parsed = datetime.strptime(query, fmt)
            break
        except (ValueError, TypeError):
            continue
    if parsed and request.event.has_subevents:
        # A date query matches quotas of subevents starting on that day.
        start = make_aware(datetime.combine(parsed.date(), time(hour=0, minute=0, second=0)), tz)
        end = make_aware(datetime.combine(parsed.date(), time(hour=23, minute=59, second=59)), tz)
        cond |= Q(subevent__date_from__gte=start) & Q(subevent__date_from__lte=end)

    qs = request.event.quotas.filter(cond).order_by('-subevent__date_from', 'name')
    total = qs.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    results = [
        {'id': quota.pk, 'name': str(quota.name), 'text': quota.name}
        for quota in qs[offset:offset + pagesize]
    ]
    return JsonResponse({
        'results': results,
        'pagination': {"more": total >= (offset + pagesize)},
    })
def checkinlist_select2(request, **kwargs):
    """Select2 endpoint: check-in lists matched by name or subevent date."""
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    pagesize = 20
    offset = (page - 1) * pagesize

    lookup = Q(name__icontains=i18ncomp(query))
    try:
        parsed = parse(query)
    except ValueError:
        pass
    else:
        if parsed and request.event.has_subevents:
            # Date-like queries also match lists of subevents on that day.
            tz = request.event.timezone
            begin = make_aware(datetime.combine(parsed.date(), time(hour=0, minute=0, second=0)), tz)
            finish = make_aware(datetime.combine(parsed.date(), time(hour=23, minute=59, second=59)), tz)
            lookup |= Q(subevent__date_from__gte=begin) & Q(subevent__date_from__lte=finish)

    lists = request.event.checkin_lists.select_related('subevent').filter(
        lookup
    ).order_by('name')
    total = lists.count()
    return JsonResponse({
        'results': [
            {
                'id': cl.pk,
                'text': str(cl.name),
                'event': str(cl.subevent) if request.event.has_subevents and cl.subevent else None,
            }
            for cl in lists[offset:offset + pagesize]
        ],
        'pagination': {"more": total >= (offset + pagesize)},
    })
def itemvarquota_select2(request, **kwargs):
    """Select2 endpoint combining items, item variations and quotas.

    Returns choices of three kinds: a bare item pk (any variation), an
    ``item-variation`` pk pair, and a ``q-<pk>`` quota entry. For events
    with subevents, quotas can also be found by entering a date that
    matches the subevent's start day.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    choices = []
    if not request.event.has_subevents:
        # We are very unlikely to need pagination
        itemqs = request.event.items.prefetch_related('variations').filter(
            name__icontains=i18ncomp(query))
        quotaqs = request.event.quotas.filter(name__icontains=query)
        more = False
    else:
        # We can't do proper pagination on a UNION-like query, so we hack it.
        if query:
            # Don't paginate
            quotaf = Q(name__icontains=query)
            try:
                dt = parse(query)
            except ValueError:
                pass
            else:
                tz = request.event.timezone
                if dt and request.event.has_subevents:
                    # The query parsed as a date: also match quotas of
                    # subevents starting on that day (event-local timezone).
                    dt_start = make_aware(
                        datetime.combine(dt.date(), time(hour=0, minute=0, second=0)), tz)
                    dt_end = make_aware(
                        datetime.combine(dt.date(), time(hour=23, minute=59, second=59)), tz)
                    quotaf |= Q(subevent__date_from__gte=dt_start) & Q(
                        subevent__date_from__lte=dt_end)
            itemqs = request.event.items.prefetch_related('variations').filter(
                name__icontains=i18ncomp(query))
            quotaqs = request.event.quotas.filter(quotaf).select_related(
                'subevent')
            more = False
        else:
            # No query: items only on the first page, quotas paginated.
            if page == 1:
                itemqs = request.event.items.prefetch_related(
                    'variations').filter(name__icontains=i18ncomp(query))
            else:
                itemqs = request.event.items.none()
            quotaqs = request.event.quotas.filter(
                name__icontains=query).select_related('subevent')
            total = quotaqs.count()
            pagesize = 20
            offset = (page - 1) * pagesize
            quotaqs = quotaqs[offset:offset + pagesize]
            more = total >= (offset + pagesize)
    # Items first: one entry for the item itself plus one per variation.
    for i in itemqs:
        variations = list(i.variations.all())
        if variations:
            choices.append(
                (str(i.pk), _('{product} – Any variation').format(product=i.name), ''))
            for v in variations:
                choices.append(('%d-%d' % (i.pk, v.pk), '%s – %s' % (i.name, v.value), ''))
        else:
            choices.append((str(i.pk), i.name, ''))
    # Then quotas; for subevent-enabled events the subevent is shown as context.
    for q in quotaqs:
        if request.event.has_subevents:
            choices.append(
                ('q-%d' % q.pk,
                 _('Any product in quota "{quota}"').format(quota=q),
                 str(q.subevent)))
        else:
            choices.append(
                ('q-%d' % q.pk,
                 _('Any product in quota "{quota}"').format(quota=q),
                 ''))
    doc = {
        'results': [{
            'id': k,
            'text': str(v),
            'event': str(t),
        } for k, v, t in choices],
        'pagination': {
            "more": more
        }
    }
    return JsonResponse(doc)
def nav_context_list(request):
    """Search data for the navigation context switcher.

    Combines, in order: the current user (when the query is empty or matches
    their e-mail or full name), matching organizers, matching events, and —
    only when a query is given — matching orders and vouchers by code. Order
    and voucher results are permission-filtered unless the user holds an
    active staff session. A supplied ``organizer`` pk is pinned right after
    the user entry, provided it exists and the user may access it.
    """
    query = request.GET.get('query', '').strip()
    organizer = request.GET.get('organizer', None)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    qs_events = request.user.get_events_with_any_permission(request).filter(
        Q(name__icontains=i18ncomp(query)) | Q(slug__icontains=query)
    ).annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from'))
    ).annotate(
        # Effective start date: earliest subevent date or the event's own.
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')
    # Staff sessions may switch to any organizer; others only to team organizers.
    if request.user.has_active_staff_session(request.session.session_key):
        qs_orga = Organizer.objects.all()
    else:
        qs_orga = Organizer.objects.filter(pk__in=request.user.teams.values_list('organizer', flat=True))
    if query:
        qs_orga = qs_orga.filter(Q(name__icontains=query) | Q(slug__icontains=query))
    if query:
        # Orders/vouchers are only searched with an explicit query, and are
        # restricted to events the user's teams may view.
        qs_orders = Order.objects.filter(code__icontains=query).select_related('event', 'event__organizer')
        if not request.user.has_active_staff_session(request.session.session_key):
            qs_orders = qs_orders.filter(
                Q(event__organizer_id__in=request.user.teams.filter(
                    all_events=True, can_view_orders=True).values_list('organizer', flat=True))
                | Q(event_id__in=request.user.teams.filter(
                    can_view_orders=True).values_list('limit_events__id', flat=True))
            )
        qs_vouchers = Voucher.objects.filter(code__icontains=query).select_related('event', 'event__organizer')
        if not request.user.has_active_staff_session(request.session.session_key):
            qs_vouchers = qs_vouchers.filter(
                Q(event__organizer_id__in=request.user.teams.filter(
                    all_events=True, can_view_vouchers=True).values_list('organizer', flat=True))
                | Q(event_id__in=request.user.teams.filter(
                    can_view_vouchers=True).values_list('limit_events__id', flat=True))
            )
    else:
        qs_vouchers = Voucher.objects.none()
        qs_orders = Order.objects.none()
    show_user = not query or (
        query and request.user.email and query.lower() in request.user.email.lower()
    ) or (
        query and request.user.fullname and query.lower() in request.user.fullname.lower()
    )
    total = qs_events.count() + qs_orga.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    # Without a query only a short preview (5 entries per category) is shown.
    results = ([
        serialize_user(request.user)
    ] if show_user else []) + [
        serialize_orga(e) for e in qs_orga[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_event(e) for e in qs_events.select_related('organizer')[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_order(e) for e in qs_orders[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_voucher(e) for e in qs_vouchers[offset:offset + (pagesize if query else 5)]
    ]
    if show_user and organizer:
        try:
            organizer = Organizer.objects.get(pk=organizer)
        except Organizer.DoesNotExist:
            # Stale pk in the request: simply don't pin anything.
            pass
        else:
            if request.user.has_organizer_permission(organizer, request=request):
                organizer = serialize_orga(organizer)
                if organizer in results:
                    results.remove(organizer)
                # Pin the requested organizer right after the user entry.
                results.insert(1, organizer)
    doc = {
        'results': results,
        'pagination': {
            "more": total >= (offset + pagesize)
        }
    }
    return JsonResponse(doc)
def filter_qs(self, qs):
    """Filter the event queryset by status, organizer, text query and
    event metadata properties from the cleaned form data.

    Meta-property filters use correlated ``Exists`` subqueries: an event
    matches either when it has a ``EventMetaValue`` equal to the requested
    value, or — when the requested value equals the property's default —
    when it has no explicit value at all for that property.
    """
    fdata = self.cleaned_data
    if fdata.get('status') == 'live':
        qs = qs.filter(live=True)
    elif fdata.get('status') == 'running':
        qs = qs.filter(
            live=True
        ).annotate(
            # Effective presale end: explicit end, else event end, else start.
            p_end=Coalesce(F('presale_end'), F('date_to'), F('date_from'))
        ).filter(
            Q(presale_start__isnull=True) | Q(presale_start__lte=now())
        ).filter(
            Q(p_end__gte=now())
        )
    elif fdata.get('status') == 'notlive':
        qs = qs.filter(live=False)
    elif fdata.get('status') == 'future':
        qs = qs.filter(presale_start__gte=now())
    elif fdata.get('status') == 'past':
        qs = qs.filter(presale_end__lte=now())
    elif fdata.get('status') == 'date_future':
        qs = qs.filter(
            Q(has_subevents=False) & Q(
                Q(Q(date_to__isnull=True) & Q(date_from__gte=now())) |
                Q(Q(date_to__isnull=False) & Q(date_to__gte=now()))
            )
        )
    elif fdata.get('status') == 'date_past':
        qs = qs.filter(
            Q(has_subevents=False) & Q(
                Q(Q(date_to__isnull=True) & Q(date_from__lt=now())) |
                Q(Q(date_to__isnull=False) & Q(date_to__lt=now()))
            )
        )
    elif fdata.get('status') == 'series':
        qs = qs.filter(has_subevents=True)
    if fdata.get('organizer'):
        qs = qs.filter(organizer=fdata.get('organizer'))
    if fdata.get('query'):
        query = fdata.get('query')
        qs = qs.filter(
            Q(name__icontains=i18ncomp(query)) | Q(slug__icontains=query)
        )
    # One OR-combined condition per property name; several form fields may
    # target the same property name (e.g. per organizer).
    filters_by_property_name = {}
    for i, p in enumerate(self.meta_properties):
        d = fdata.get('meta_{}'.format(p.name))
        if d:
            # Subquery: the event has this property set to exactly ``d``.
            emv_with_value = EventMetaValue.objects.filter(
                event=OuterRef('pk'),
                property__pk=p.pk,
                value=d
            )
            # Subquery: the event has *any* explicit value for this property.
            emv_with_any_value = EventMetaValue.objects.filter(
                event=OuterRef('pk'),
                property__pk=p.pk,
            )
            qs = qs.annotate(**{'attr_{}'.format(i): Exists(emv_with_value)})
            if p.name in filters_by_property_name:
                filters_by_property_name[p.name] |= Q(**{'attr_{}'.format(i): True})
            else:
                filters_by_property_name[p.name] = Q(**{'attr_{}'.format(i): True})
            if p.default == d:
                # Searching for the default value also matches events of this
                # property's organizer that set no explicit value.
                qs = qs.annotate(**{'attr_{}_any'.format(i): Exists(emv_with_any_value)})
                filters_by_property_name[p.name] |= Q(**{'attr_{}_any'.format(i): False, 'organizer_id': p.organizer_id})
    for f in filters_by_property_name.values():
        qs = qs.filter(f)
    if fdata.get('ordering'):
        qs = qs.order_by(self.get_order_by())
    return qs
def itemvarquota_select2(request, **kwargs):
    """Select2 endpoint combining items, item variations and quotas.

    Like the item search it matches both the public and the internal item
    name. Choice ids are: bare item pk (any variation), ``<item>-<var>``
    pairs, and ``q-<pk>`` for quotas. With subevents enabled, entering a
    date also finds quotas of subevents starting that day.
    """
    query = request.GET.get('query', '')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    choices = []
    if not request.event.has_subevents:
        # We are very unlikely to need pagination
        itemqs = request.event.items.prefetch_related('variations').filter(name__icontains=i18ncomp(query))
        quotaqs = request.event.quotas.filter(name__icontains=query)
        more = False
    else:
        # We can't do proper pagination on a UNION-like query, so we hack it.
        if query:
            # Don't paginate
            quotaf = Q(name__icontains=query)
            try:
                dt = parse(query)
            except ValueError:
                pass
            else:
                tz = request.event.timezone
                if dt and request.event.has_subevents:
                    # Date-like query: match quotas by subevent start day too.
                    dt_start = make_aware(datetime.combine(dt.date(), time(hour=0, minute=0, second=0)), tz)
                    dt_end = make_aware(datetime.combine(dt.date(), time(hour=23, minute=59, second=59)), tz)
                    quotaf |= Q(subevent__date_from__gte=dt_start) & Q(subevent__date_from__lte=dt_end)
            itemqs = request.event.items.prefetch_related('variations').filter(
                Q(name__icontains=i18ncomp(query)) | Q(internal_name__icontains=query)
            )
            quotaqs = request.event.quotas.filter(quotaf).select_related('subevent')
            more = False
        else:
            # No query: items only on page one, quotas properly paginated.
            if page == 1:
                itemqs = request.event.items.prefetch_related('variations').filter(
                    Q(name__icontains=i18ncomp(query)) | Q(internal_name__icontains=query)
                )
            else:
                itemqs = request.event.items.none()
            quotaqs = request.event.quotas.filter(name__icontains=query).select_related('subevent')
            total = quotaqs.count()
            pagesize = 20
            offset = (page - 1) * pagesize
            quotaqs = quotaqs[offset:offset + pagesize]
            more = total >= (offset + pagesize)
    # Items: one "any variation" entry plus one entry per variation.
    for i in itemqs:
        variations = list(i.variations.all())
        if variations:
            choices.append((str(i.pk), _('{product} – Any variation').format(product=i), ''))
            for v in variations:
                choices.append(('%d-%d' % (i.pk, v.pk), '%s – %s' % (i, v.value), ''))
        else:
            choices.append((str(i.pk), str(i), ''))
    # Quotas; for subevent-enabled events the subevent provides context.
    for q in quotaqs:
        if request.event.has_subevents:
            choices.append(('q-%d' % q.pk, _('Any product in quota "{quota}"').format(
                quota=q
            ), str(q.subevent)))
        else:
            choices.append(('q-%d' % q.pk, _('Any product in quota "{quota}"').format(quota=q), ''))
    doc = {
        'results': [
            {
                'id': k,
                'text': str(v),
                'event': str(t),
            } for k, v, t in choices
        ],
        'pagination': {
            "more": more
        }
    }
    return JsonResponse(doc)
def search_qs(self, queryset, name, value):
    """Filter ``queryset`` to rows whose name, slug or location contains ``value``."""
    condition = Q(name__icontains=i18ncomp(value))
    condition |= Q(slug__icontains=value)
    condition |= Q(location__icontains=i18ncomp(value))
    return queryset.filter(condition)