def handle(self, *args, **options):
    """Management-command entry point: de-duplicate Instances sharing a `uuid`.

    For every uuid that occurs more than once (optionally scoped by --xform
    and/or --user), rows are grouped by `xml_hash`; the oldest row of each
    hash group is kept as the reference and the rest are handed to
    `__clean_up`, which soft-deletes or purges them depending on --purge.
    Afterwards, if anything was cleaned up, each affected user's submission
    counter is recomputed.
    """
    username = options['user']
    xform_id_string = options['xform']
    purge = options['purge']
    # Retrieve all instances with the same `uuid`.
    query = Instance.objects
    if xform_id_string:
        query = query.filter(xform__id_string=xform_id_string)
    if username:
        query = query.filter(xform__user__username=username)
    # if we don't purge, we don't want to see instances
    # that have been marked as deleted. However, if we do purge
    # we do need these instances to be in the list in order
    # to delete them permanently
    if not purge:
        query = query.filter(deleted_at=None)
    # Keep only the uuids that occur more than once.
    query = query.values_list('uuid', flat=True)\
        .annotate(count_uuid=Count('uuid'))\
        .filter(count_uuid__gt=1)\
        .distinct()
    for uuid in query.all():
        duplicated_query = Instance.objects.filter(uuid=uuid)
        # if we don't purge, we don't want to see instances
        # that have been marked as deleted. However, if we do purge
        # we do need these instances to be in the list in order
        # to delete them permanently
        if not purge:
            duplicated_query = duplicated_query.filter(deleted_at=None)
        # Order by (xml_hash, date_created) so identical payloads are
        # adjacent and the oldest row of each hash group comes first.
        instances_with_same_uuid = duplicated_query.values_list('id', 'xml_hash')\
            .order_by('xml_hash', 'date_created')
        xml_hash_ref = None
        instance_id_ref = None
        duplicated_instance_ids = []
        for instance_with_same_uuid in instances_with_same_uuid:
            instance_id = instance_with_same_uuid[0]
            instance_xml_hash = instance_with_same_uuid[1]
            if instance_xml_hash != xml_hash_ref:
                # New hash group: flush the duplicates collected for the
                # previous group, then make this row the new reference.
                self.__clean_up(instance_id_ref, duplicated_instance_ids, purge)
                xml_hash_ref = instance_xml_hash
                instance_id_ref = instance_id
                duplicated_instance_ids = []
                continue
            duplicated_instance_ids.append(instance_id)
        # Flush the last hash group of this uuid.
        self.__clean_up(instance_id_ref, duplicated_instance_ids, purge)
    # `__vaccuum` (sic) and `__users` are maintained by `__clean_up`
    # elsewhere in this class — presumably a "did anything change" flag and
    # the set of affected users; TODO confirm against the rest of the file.
    if not self.__vaccuum:
        if purge:
            self.stdout.write('No instances have been purged.')
        else:
            self.stdout.write('No instances have been marked as deleted.')
    else:
        # Update number of submissions for each user.
        for user_ in list(self.__users):
            result = XForm.objects.filter(user_id=user_.id)\
                .aggregate(count=Sum('num_of_submissions'))
            user_.profile.num_of_submissions = result['count']
            self.stdout.write(
                "\tUpdating `{}`'s number of submissions".format(
                    user_.username))
            user_.profile.save(update_fields=['num_of_submissions'])
            self.stdout.write('\t\tDone! New number: {}'.format(
                result['count']))
def get_categories():
    """Return categories that contain at least one post (Count counts posts per category)."""
    populated = Category.objects.annotate(num_posts=Count('post'))
    return populated.filter(num_posts__gt=0)
def show_tags(context):
    """Template helper: expose only tags that have at least one post."""
    tags = Tag.objects.annotate(num_posts=Count('post'))
    return {'tag_list': tags.filter(num_posts__gt=0)}
def get_category_list():
    """Return the list of categories that contain at least one article."""
    counted = Category.objects.annotate(total_num=Count('article'))
    return counted.filter(total_num__gt=0)
def ajax_report05(request):
    """JSON report: occurrence counts grouped by type name (typed rows only)."""
    typed = Occurrence.objects.filter(type__isnull=False)
    counts = typed.values_list('type__name').annotate(Count('type'))
    return JsonHttpResponse(list(counts))
def get_queryset(self):
    """Stages of the current project, each annotated with deployment and host totals."""
    stages = self.project.get_stages()
    return stages.annotate(
        deployment_count=Count('deployment'),
        host_count=Count('hosts'),
    )
def get_categories():
    # Requires the Category model to be imported at the top of the module.
    with_counts = Category.objects.annotate(num_posts=Count('post'))
    return with_counts.filter(num_posts__gt=0)
def get_categories():
    """Return every category that has at least one post, annotated as `post_count`."""
    return (Category.objects.all()
            .annotate(post_count=Count('post'))
            .filter(post_count__gt=0))
def get_queryset(self):
    """Bookmarks annotated with their like count as `num_likes`."""
    annotated = Bookmark.objects.annotate(num_likes=Count('likes'))
    return annotated
def total_records(self):
    """Count the rows matched by the current query."""
    aggregated = self.get_query().aggregate(Count('id'))
    return aggregated['id__count']
def get_context_data(self, **kwargs):
    """Extend the context with per-team season records (won/lost/tie).

    Teams are sorted best-first by (won, tie, lost). Relies on `self.teams`
    and `self.season_games` being set on the view (presumably elsewhere —
    TODO confirm); when either is empty, `verbose_teams` stays empty.
    """
    context = super().get_context_data(**kwargs)
    verbose_teams = []
    if self.teams and self.season_games:
        for team in self.teams:
            # All games this team took part in, home or away.
            season_team_games = self.season_games.filter(
                Q(home_team=team.id) | Q(visitor_team=team.id),
            )
            # Wins/losses are counted from both perspectives: games where the
            # team was at home and outscored the visitor, plus games where it
            # was the visitor and outscored the home side.
            season_game_agg = season_team_games.aggregate(
                won=Count(
                    "id",
                    filter=Q(
                        home_team=team.id,
                        home_team_score__gt=F("visitor_team_score"),
                    ),
                ) + Count(
                    "id",
                    filter=Q(
                        visitor_team=team.id,
                        visitor_team_score__gt=F("home_team_score"),
                    ),
                ),
                lost=Count(
                    "id",
                    filter=Q(
                        home_team=team.id,
                        home_team_score__lt=F("visitor_team_score"),
                    ),
                ) + Count(
                    "id",
                    filter=Q(
                        visitor_team=team.id,
                        visitor_team_score__lt=F("home_team_score"),
                    ),
                ),
                tie=Count("id", filter=Q(home_team_score=F("visitor_team_score"))),
            )
            # Win ratio over decided games only; 0 when no game was decided.
            if season_game_agg.get("won") or season_game_agg.get("lost"):
                won_lost_ratio = 1 - season_game_agg.get("lost") / float(
                    season_game_agg.get("won") + season_game_agg.get("lost")
                )
            else:
                won_lost_ratio = 0
            verbose_teams.append(
                {
                    "db_team": team,
                    "city": CityChoices(team.city).label,
                    "stadium": StadiumChoices(team.stadium).label,
                    # Aggregates are None when no rows matched; coerce to 0.
                    "won": season_game_agg.get("won") or 0,
                    "lost": season_game_agg.get("lost") or 0,
                    "tie": season_game_agg.get("tie") or 0,
                    "won_lost_ratio": won_lost_ratio,
                }
            )
    context.update(
        {
            "verbose_teams": sorted(
                verbose_teams,
                key=lambda i: (i["won"], i["tie"], i["lost"]),
                reverse=True,
            ),
        }
    )
    return context
def _build_context(request):
    """Build the template context for the syslogger search page.

    With GET parameters present, validates them through
    LoggerGroupSearchForm, filters LogMessage rows by time window and the
    optional priority/mnemonic/facility/category/origin fields, and computes
    three aggregate tables (by priority, by message type, by origin).
    Without GET parameters, presents a fresh form covering the last 24 hours.
    """
    nav_path = [('Home', '/'), ('Syslogger', reverse('logger_index'))]
    results = []
    context = {}
    aggregates = {}
    if request.GET:
        query_dict = request.GET.copy()
        form = LoggerGroupSearchForm(query_dict)
        if form.is_valid():
            # Base result set: messages inside the requested time window.
            results = LogMessage.objects.filter(
                time__gte=form.cleaned_data['timestamp_from'],
                time__lte=form.cleaned_data['timestamp_to'],
            ).select_related()
            # Each optional field may arrive as a scalar or a list; normalize
            # to a list so a single `__in` filter handles both.
            if form.cleaned_data.get('priority', None):
                priority_keyword = form.cleaned_data['priority']
                if not isinstance(form.cleaned_data['priority'], list):
                    priority_keyword = [form.cleaned_data['priority']]
                results = results.filter(
                    newpriority__keyword__in=priority_keyword)
            if form.cleaned_data.get('mnemonic', None):
                message_type_mnemonic = form.cleaned_data['mnemonic']
                if not isinstance(form.cleaned_data['mnemonic'], list):
                    message_type_mnemonic = [form.cleaned_data['mnemonic']]
                results = results.filter(
                    type__mnemonic__in=message_type_mnemonic)
            if form.cleaned_data.get('facility', None):
                message_type_facility = form.cleaned_data['facility']
                if not isinstance(form.cleaned_data['facility'], list):
                    message_type_facility = [form.cleaned_data['facility']]
                results = results.filter(
                    type__facility__in=message_type_facility)
            if form.cleaned_data["category"]:
                categories = form.cleaned_data['category']
                if not isinstance(form.cleaned_data['category'], list):
                    categories = [form.cleaned_data['category']]
                results = results.filter(origin__category__in=categories)
            if 'origin' in form.cleaned_data and form.cleaned_data['origin']:
                origin_name = form.cleaned_data['origin']
                if not isinstance(form.cleaned_data['origin'], list):
                    origin_name = [form.cleaned_data['origin']]
                results = results.filter(origin__name__in=origin_name)
            # Aggregate tables shown above the log listing.
            priorities = results.values('newpriority__keyword').annotate(
                sum=Count('newpriority__keyword'))
            priorities_headers = ['Priority']
            message_types = results.values(
                'type__facility', 'type__priority__keyword',
                'type__mnemonic').annotate(sum=Count('type'))
            message_types_headers = ['Facility', 'Priority', 'State']
            origins = results.values('origin__name').annotate(
                sum=Count('origin__name'))
            origins_headers = ['Origin']
            aggregates.update({
                'Priorities': {
                    'values': priorities,
                    'headers': priorities_headers,
                    'colspan': 1,
                }
            })
            aggregates.update({
                'Type': {
                    'values': message_types,
                    'headers': message_types_headers,
                    'colspan': 3,
                }
            })
            aggregates.update({
                'Origin': {
                    'values': origins,
                    'headers': origins_headers,
                    'colspan': 1,
                }
            })

            def _update_show_log_context(value, results):
                # Mutates the enclosing `context` and the bound form so the
                # rendered page remembers the show_log choice.
                if value:
                    context.update({'log_messages': results})
                context.update({'show_log': value})
                form.data = form.data.copy()  # mutable QueryDict, yes please
                form.data['show_log'] = value

            if form.cleaned_data.get('show_log', None):
                show_log = bool(form.cleaned_data['show_log'])
                _update_show_log_context(show_log, results)
            # Auto-expand the log when the aggregates are too uniform to be
            # informative on their own.
            if len(priorities) <= 1 and len(origins) <= 1:
                _update_show_log_context(True, results)
            elif len(message_types) <= 1 and len(priorities) <= 1:
                _update_show_log_context(True, results)
    else:
        # No query yet: default the form to the last 24 hours.
        initial_context = {
            'timestamp_from': (datetime.datetime.now() -
                               datetime.timedelta(days=1)),
            'timestamp_to': datetime.datetime.now(),
        }
        form = LoggerGroupSearchForm(initial=initial_context)
    strip_query_args = _strip_empty_arguments(request)
    strip_query_args = strip_query_args.urlencode() if strip_query_args else ""
    context.update({
        'form': form,
        'bookmark': "{0}?{1}".format(reverse(index), strip_query_args),
        'aggregates': aggregates,
        'timestamp': datetime.datetime.now().strftime(DATEFORMAT),
        'domain_strip': json.dumps(DOMAIN_SUFFICES),
        'navpath': nav_path,
        'title': create_title(nav_path),
    })
    return context
def get_categories():
    """Categories that contain at least one post."""
    qs = Category.objects.annotate(num_posts=Count('post'))
    return qs.filter(num_posts__gt=0)
def school_spreadsheet_downloads(request, conference_slug, school_slug):
    """Serve school-scoped CSV downloads for a conference.

    Currently supports one report, `country-committee-assignments`: a
    country x committee matrix of delegate-position counts. Raises Http404
    when the school cannot be authenticated or the report is unknown.
    """
    conference = get_object_or_404(Conference, url_name=conference_slug)
    school = get_object_or_404(School, url_name=school_slug)
    if school_authenticate(request, conference, school):
        # NOTE(review): `mimetype=` was removed from HttpResponse in
        # Django 1.7 — on modern Django this would need `content_type=`.
        response = HttpResponse(mimetype='text/csv')
        writer = UnicodeCSVWriter(response)
        if 'country-committee-assignments' in request.GET:
            response['Content-Disposition'] = 'attachment; filename=country-committee-assignments-' + conference_slug + ".csv"
            committees = Committee.objects.filter(conference=conference)
            countries = Country.objects.filter(conference=conference)
            # Header row: country name followed by one column per committee.
            headers = ['Country']
            for committee in committees:
                headers.append(committee.name)
            writer.writerow(headers)
            # One grouped count query, cached in a dict keyed by
            # (country, committee) so the matrix fills without per-cell queries.
            # NOTE(review): this count is not filtered by conference; rows from
            # other conferences are fetched but never looked up — confirm intent.
            counts = DelegatePosition.objects.values('committee', 'country').annotate(count=Count('id'))
            count_dict = dict()
            for item in counts:
                count_dict[(item['country'], item['committee'])] = item['count']
            for country in countries:
                row = [country.name]
                for committee in committees:
                    # Default to 0 when no delegate positions exist for the pair.
                    row.append(str(count_dict.get((country.pk, committee.pk), 0)))
                writer.writerow(row)
        else:
            raise Http404
    else:
        raise Http404
    return response
def run(scheduled_process: ScheduledProcess) -> None:
    """Apply the data-retention policy: thin out old readings hour by hour.

    For each configured model, finds hours older than the retention cutoff
    that still hold more than ITEM_COUNT_PER_HOUR rows and deletes every row
    in those hours except the first and the last one. Disables the scheduled
    process entirely when retention is switched off; backs off for 12 hours
    when a run finds nothing to clean.
    """
    retention_settings = RetentionSettings.get_solo()
    if retention_settings.data_retention_in_hours == RetentionSettings.RETENTION_NONE:
        scheduled_process.disable()  # Changing the retention settings in the admin will re-activate it again.
        return
    # These models should be rotated with retention. Dict value is the datetime field used.
    ITEM_COUNT_PER_HOUR = 2
    MODELS_TO_CLEANUP = {
        DsmrReading.objects.processed(): 'timestamp',
        ElectricityConsumption.objects.all(): 'read_at',
        GasConsumption.objects.all(): 'read_at',
    }
    retention_date = timezone.now() - timezone.timedelta(hours=retention_settings.data_retention_in_hours)
    data_to_clean_up = False
    # We need to force UTC here, to avoid AmbiguousTimeError's on DST changes.
    timezone.activate(pytz.UTC)
    for base_queryset, datetime_field in MODELS_TO_CLEANUP.items():
        # Hours (truncated timestamps) older than the cutoff that still hold
        # more rows than we keep, capped per run by the settings limit.
        hours_to_cleanup = base_queryset.filter(
            **{'{}__lt'.format(datetime_field): retention_date}
        ).annotate(
            item_hour=TruncHour(datetime_field)
        ).values('item_hour').annotate(
            item_count=Count('id')
        ).order_by().filter(
            item_count__gt=ITEM_COUNT_PER_HOUR
        ).order_by('item_hour').values_list(
            'item_hour', flat=True
        )[:settings.DSMRREADER_RETENTION_MAX_CLEANUP_HOURS_PER_RUN]
        hours_to_cleanup = list(hours_to_cleanup)  # Force evaluation.
        if not hours_to_cleanup:
            continue
        data_to_clean_up = True
        for current_hour in hours_to_cleanup:
            # Fetch all data per hour.
            data_set = base_queryset.filter(
                **{
                    '{}__gte'.format(datetime_field): current_hour,
                    '{}__lt'.format(datetime_field): current_hour + timezone.timedelta(hours=1),
                }
            )
            # Extract the first/last item, so we can exclude it.
            # NOTE: Want to alter this? Please update ITEM_COUNT_PER_HOUR above as well!
            keeper_pks = [
                data_set.order_by(datetime_field)[0].pk,
                data_set.order_by('-{}'.format(datetime_field))[0].pk
            ]
            # Now drop all others.
            logger.debug('Retention: Cleaning up: %s (%s)', current_hour, data_set[0].__class__.__name__)
            data_set.exclude(pk__in=keeper_pks).delete()
    timezone.deactivate()
    # Delay for a bit, as there is nothing to do.
    if not data_to_clean_up:
        scheduled_process.delay(hours=12)
def get_queryset(self):
    """Comments annotated with their like count as `num_likes`."""
    liked = Comment.objects.annotate(num_likes=Count('likes'))
    return liked
def get_tags():
    # Requires the Tag model to be imported at the top of the module.
    tags_with_posts = Tag.objects.annotate(num_posts=Count('post'))
    return tags_with_posts.filter(num_posts__gt=0)
def get_annotated_groups():
    """Return an annotated set of groups for the tenant.

    Each group carries distinct counts of its principals and policies.
    """
    return Group.objects.annotate(
        policyCount=Count("policies", distinct=True),
        principalCount=Count("principals", distinct=True),
    )
def irc(request, page=1, link_page=1):
    """IRC log viewer with search, link listing, and karma leaderboards.

    Access is gated by a cookie set after the correct magic word is posted;
    without it an access page is rendered instead.
    """
    # Set the cookie when the correct magic word is posted.
    if request.POST:
        # BUGFIX: QueryDict.has_key() was removed in Python 3 / modern
        # Django; the `in` operator is the equivalent membership test.
        if 'word' in request.POST and hashlib.sha224(
                request.POST['word']).hexdigest() == MAGIC_WORD:
            r = HttpResponseRedirect('.')
            # ~100 years; NOTE(review): 356 looks like a typo for 365 but is
            # kept as-is to preserve behavior.
            r.set_cookie("irctoken", COOKIE_TOKEN, 60 * 60 * 24 * 356 * 100)
            return r
    irc_token = request.COOKIES.get("irctoken", "")
    if irc_token == COOKIE_TOKEN:
        if 'term' in request.POST:
            # Search the channel log for the posted term.
            q = Irc.objects.all().filter(
                message__icontains=request.POST['term'],
                channel__iexact=CHANNEL).order_by('time').reverse()
        else:
            from django.db.models import Q
            q = Irc.objects.filter(
                Q(channel__iexact=CHANNEL)
                | Q(msg_type='Q')).order_by('time').reverse()
        p = Paginator(q, 100)
        links = Link.objects.all().filter(
            irc__channel=CHANNEL).order_by('id').reverse()
        links_p = Paginator(links, 10)
        # this year (the query filters on time__year, not all time):
        karma = Karma.objects.all().filter(
            channel__iexact=CHANNEL,
            time__year=datetime.now().year).values('nick').annotate(
                karma=Count('nick')).order_by('-karma')[:5]
        # this week:
        week_start = date.today() - timedelta(days=date.today().weekday())
        karma_week = Karma.objects.filter(
            time__gte=week_start,
            channel__iexact=CHANNEL,
            time__year=datetime.now().year).values('nick').annotate(
                karma=Count('nick')).order_by('-karma')[:5]
        # this month:
        month_start = date.today() - timedelta(days=date.today().day - 1)
        karma_month = Karma.objects.filter(
            time__gte=month_start,
            channel__iexact=CHANNEL,
            time__year=datetime.now().year).values('nick').annotate(
                karma=Count('nick')).order_by('-karma')[:5]
        return render_to_response('irc.html', {
            'log': p.page(page),
            'links': links_p.page(link_page),
            'karma': karma,
            'karma_week': karma_week,
            'karma_month': karma_month
        }, context_instance=RequestContext(request))
    else:
        return render_to_response('irc_access.html', {
            'hello': "aaa",
        }, context_instance=RequestContext(request))
def annotate_roles_with_counts(queryset):
    """Annotate the queryset for roles with counts.

    Adds distinct `policyCount` and `accessCount` annotations to each role.
    """
    annotated = queryset.annotate(
        accessCount=Count("access", distinct=True),
        policyCount=Count("policies", distinct=True),
    )
    return annotated
def get_tag_list():
    """Return the list of tags attached to at least one article."""
    tagged = Tag.objects.annotate(total_num=Count('article'))
    return tagged.filter(total_num__gt=0)
def viewBugProviders(request):
    """Render the provider index with each provider's bug count as `size`."""
    provider_qs = BugProvider.objects.annotate(size=Count('bug'))
    context = {'providers': provider_qs}
    return render(request, 'providers/index.html', context)
def ajax_report04(request):
    """JSON report: occurrence counts grouped by neighborhood.

    NOTE(review): the filter is on `region` while the grouping/count is on
    `neighborhood` — confirm this asymmetry is intentional.
    """
    with_region = Occurrence.objects.filter(region__isnull=False)
    counts = with_region.values_list('neighborhood').annotate(Count('neighborhood'))
    return JsonHttpResponse(list(counts))
def calcula_menu(user, path):
    """Build the navigation-menu structure for *user* at URL *path*.

    Determines the user's roles from group membership, assembles a tuple of
    menu trees (item id, label, url name, visibility condition, alert badge,
    subitems), and flattens the tree matching the current URL into
    `menu['items']` / `menu['subitems']` / `menu['subsubitems']`.
    Returns None for anonymous users.
    """
    if not user.is_authenticated:
        return
    # Determine which groups this user belongs to:
    al = Group.objects.get_or_create(name='alumne')[0] in user.groups.all()
    di = not al and Group.objects.get_or_create(
        name='direcció')[0] in user.groups.all()
    pr = not al and Group.objects.get_or_create(
        name='professors')[0] in user.groups.all()
    pl = not al and Group.objects.get_or_create(
        name='professional')[0] in user.groups.all()
    co = not al and Group.objects.get_or_create(
        name='consergeria')[0] in user.groups.all()
    pg = not al and Group.objects.get_or_create(
        name='psicopedagog')[0] in user.groups.all()
    so = not al and Group.objects.get_or_create(
        name='sortides')[0] in user.groups.all()
    # Tutor: a professor with at least one (individualized) tutorship.
    tu = not al and pr and (
        User2Professor(user).tutor_set.exists()
        or User2Professor(user).tutorindividualitzat_set.exists())
    tots = di or pr or pl or co or al or pg
    # Check for unread messages.
    nMissatges = user.destinatari_set.filter(
        moment_lectura__isnull=True).count()
    fa2segons = datetime.now() - timedelta(seconds=2)
    nMissatgesDelta = user.destinatari_set.filter(
        moment_lectura__gte=fa2segons).count()
    # Check for unprocessed expulsions, or expulsions pending by
    # accumulation of incidents.
    teExpulsionsSenseTramitar = False
    if pr:
        professor = User2Professor(user)
        teExpulsionsSenseTramitar = professor.expulsio_set.exclude(
            tramitacio_finalitzada=True).exists()
        # Incident accumulation: three or more open, non-informative,
        # untutored incidents by this professional trigger an expulsion.
        if settings.CUSTOM_INCIDENCIES_PROVOQUEN_EXPULSIO and not teExpulsionsSenseTramitar:
            professional = User2Professional(user)
            teExpulsionsSenseTramitar = (Alumne.objects.order_by().filter(
                incidencia__professional=professional,
                incidencia__tipus__es_informativa=False,
                incidencia__gestionada_pel_tutor=False,
                incidencia__es_vigent=True).annotate(
                    n=Count('incidencia')).filter(n__gte=3).exists())
    # Check whether a qualitative-evaluation period is currently open.
    hiHaUnaQualitativaOberta = False
    if pr:
        from aula.apps.avaluacioQualitativa.models import AvaluacioQualitativa
        hiHaUnaQualitativaOberta = AvaluacioQualitativa.objects.filter(
            data_obrir_avaluacio__lte=date.today(),
            data_tancar_avaluacio__gte=date.today()).exists()
    menu = {
        'items': [],
        'subitems': [],
        'subsubitems': [],
    }
    try:
        nom_path = resolve(path).url_name
    except:
        # Unresolvable path: return an empty menu skeleton.
        return menu
    menu["esalumne"] = al
    if al:
        alumneuser = AlumneUser.objects.get(id=user.id)
        alumne = alumneuser.getAlumne()
        menu["nomusuari"] = u"Família de {alumne}".format(alumne=alumne.nom)
    else:
        menu["nomusuari"] = user.first_name or user.username
    try:
        # URL names follow the 'menu__submenu__subsubmenu' convention.
        menu_id, submenu_id, subsubmenu_id = nom_path.split('__')[:3]
    except:
        return menu
    # Tutoring subtree: (label, url name, condition, badge, subsubitems).
    arbre_tutoria = (
        ("Actuacions", 'tutoria__actuacions__list', tu, None, None),
        ("Incidències de Tutor", 'tutoria__incidencies__list', tu, None, None),
        ("Justificar", 'tutoria__justificar__pre_justificar', tu, None, None),
        ("Cartes", 'tutoria__cartes_assistencia__gestio_cartes', tu, None, None),
        ("Alumnes", 'tutoria__alumnes__list', tu, None, None),
        ("Assistència", 'tutoria__assistencia__list_entre_dates', tu, None, None),
        ("Informe", 'tutoria__alumne__informe_setmanal', tu, None, None),
        ("Portal", 'tutoria__relacio_families__dades_relacio_families', tu, None, None),
        ("Seguiment", 'tutoria__seguiment_tutorial__formulari', tu, None, None),
    )
    if settings.CUSTOM_TUTORS_INFORME:
        arbre_tutoria += (("Impressió Faltes i Incid.",
                           'tutoria__informe__informe_faltes_incidencies',
                           tu, None, None), )
    if hasattr(settings, 'CUSTOM_MODUL_SORTIDES_ACTIU'
               ) and settings.CUSTOM_MODUL_SORTIDES_ACTIU and (di or pr):
        professor = User2Professor(user)
        filtre = [
            'P',
            'R',
        ]
        # Badge the "Sortides" entry when upcoming outings await this tutor.
        te_sortides_actives = (Sortida.objects.exclude(estat='E').filter(
            estat__in=filtre).filter(data_inici__gte=datetime.now()).filter(
                tutors_alumnes_convocats=professor).exists())
        arbre_tutoria += (("Sortides", 'tutoria__justificarSortida__list', tu,
                           (u'!', 'info') if te_sortides_actives else None,
                           None), )
    activarModulPresenciaSetmanal = False
    if hasattr(settings, 'CUSTOM_MODUL_PRESENCIA_SETMANAL_ACTIU'
               ) and settings.CUSTOM_MODUL_PRESENCIA_SETMANAL_ACTIU:
        activarModulPresenciaSetmanal = True
    arbre1 = (
        #--Consergeria--------------------------------------------------------------------------
        ('consergeria', 'Consergeria', 'consergeria__missatges__envia_tutors',
         co, None, (
             ("Missatge a tutors", 'consergeria__missatges__envia_tutors', co,
              None, None),
             ("Incidència per retard", 'consergeria__incidencia__onbehalf', co,
              None, None),
         )),
        #--Aula--------------------------------------------------------------------------
        # id, name, view, permission, label
        (
            'aula',
            'Aula',
            'blanc__blanc__blanc',
            pr,
            teExpulsionsSenseTramitar or hiHaUnaQualitativaOberta,
            (
                ("Presencia", 'aula__horari__horari', pr, None, None),
                #("Alumnes", 'aula__alumnes__alumnes_i_assignatures', pr, None, None ),
                (
                    "Alumnes",
                    'aula__alumnes__blanc',
                    pr,
                    None,
                    (("Els meus alumnes",
                      'aula__alumnes__alumnes_i_assignatures', pr, None), ),
                ),
                (
                    "Incidències",
                    'aula__incidencies__blanc',
                    pr,
                    (u'!', 'info') if teExpulsionsSenseTramitar else None,
                    (
                        ("Incidències",
                         'aula__incidencies__les_meves_incidencies', pr,
                         (u'!', 'info') if teExpulsionsSenseTramitar else None),
                        ("Nova Incidència (fora d'aula)",
                         'aula__incidencies__posa_incidencia', pr, None),
                        ("Recull Expulsió", 'aula__incidencies__posa_expulsio',
                         pr, None),
                    ),
                ),
                ("Matèries", 'aula__materies__blanc', pr, None,
                 (("Llistat entre dates",
                   'aula__materies__assistencia_llistat_entre_dates', pr, None),
                  ("Calculadora UF", 'aula__materies__calculadora_uf', pr,
                   None))),
                ("Qualitativa",
                 'aula__qualitativa__les_meves_avaulacions_qualitatives', pr,
                 (u'!', 'info') if hiHaUnaQualitativaOberta else None, None),
                ("Pres. Setmanal", 'aula__presencia_setmanal__index',
                 pr and activarModulPresenciaSetmanal, None, None),
            )),
        #--Tutoria--------------------------------------------------------------------------
        ('tutoria', 'Tutoria', 'tutoria__actuacions__list', tu, None,
         arbre_tutoria),
        #--Gestió--------------------------------------------------------------------------
        ('gestio', 'Gestió', 'gestio__reserva_aula__list', co or pl, None, (
            ("Reserva Aula", 'gestio__reserva_aula__list', co or pl, None, None),
            ("Cerca Alumne", 'gestio__usuari__cerca', co or pl, None, None),
            ("Cerca Professor", 'gestio__professor__cerca', co or pl, None, None),
        )),
        #--psicopedagog--------------------------------------------------------------------------
        ('psico', 'Psicopedagog', 'psico__informes_alumne__list', pg or di,
         None, (
             ("Alumne", 'psico__informes_alumne__list', pg or di, None, None),
             ("Actuacions", 'psico__actuacions__list', pg or di, None, None),
         )),
        #--Coord.Pedag--------------------------------------------------------------------------
        (
            'coordinacio_pedagogica',
            'Coord.Pedag',
            'coordinacio_pedagogica__qualitativa__blanc',
            di,
            None,
            (
                (
                    "Qualitativa",
                    'coordinacio_pedagogica__qualitativa__blanc',
                    di,
                    None,
                    (
                        ("Avaluacions",
                         'coordinacio_pedagogica__qualitativa__avaluacions',
                         di, None),
                        ("Items",
                         'coordinacio_pedagogica__qualitativa__items', di,
                         None),
                        ("Resultats",
                         'coordinacio_pedagogica__qualitativa__resultats_qualitatives',
                         di, None),
                    ),
                ),
                ("Seguiment Tutorial",
                 "coordinacio_pedagogica__seguiment_tutorial__preguntes", di,
                 None, None),
            ),
        ),
        #--Coord.Alumnes--------------------------------------------------------------------------
        (
            'coordinacio_alumnes',
            'Coord.Alumnes',
            'coordinacio_alumnes__ranking__list',
            di,
            None,
            (
                ("Alertes Incid.", 'coordinacio_alumnes__ranking__list', di,
                 None, None),
                ("Alertes Assist.",
                 'coordinacio_alumnes__assistencia_alertes__llistat', di, None,
                 None),
                ("Cartes", 'coordinacio_alumnes__assistencia__cartes', di,
                 None, None),
                ("Sancions", 'coordinacio_alumnes__sancions__sancions', di,
                 None, None),
                ("Passa llista grup",
                 'coordinacio_alumnes__presencia__passa_llista_a_un_grup_tria',
                 di, None, None),
                ("Impressió Faltes i Incid.",
                 'coordinacio_alumnes__alumne__informe_faltes_incidencies', di,
                 None, None),
                #[email protected]
                ("Indicadors", 'coordinacio_alumnes__indicadors__llistat', di,
                 None, None),
            )),
        #--Coord.Profess.--------------------------------------------------------------------------
        (
            'professorat',
            'Coord.Prof',
            'professorat__baixes__blanc',
            di,
            None,
            (
                (
                    "Feina Absència",
                    'professorat__baixes__blanc',
                    di,
                    None,
                    (
                        ('Posar feina',
                         'professorat__baixes__complement_formulari_tria', di,
                         None),
                        ('Imprimir feina',
                         'professorat__baixes__complement_formulari_impressio_tria',
                         di, None),
                    ),
                ),
                (
                    "Tutors",
                    'professorat__tutors__blanc',
                    di,
                    None,
                    (
                        ('Tutors Grups', 'professorat__tutors__tutors_grups',
                         di, None),
                        ('Tutors individualitzat',
                         'professorat__tutors__tutors_individualitzats', di,
                         None),
                    ),
                ),
                ("Professors", 'professorat__professors__list', di, None, None),
                ("Estat Tramitació Exp.",
                 'professorat__expulsions__control_tramitacio', di, None, None),
            ),
        ),
        #--Administració--------------------------------------------------------------------------
        (
            'administracio',
            'Admin',
            'administracio__sincronitza__blanc',
            di,
            None,
            (
                (
                    "Sincronitza",
                    'administracio__sincronitza__blanc',
                    di,
                    None,
                    (
                        ("Alumnes ESO/BAT", 'administracio__sincronitza__esfera',
                         di, None),
                        ("Alumnes Cicles", 'administracio__sincronitza__saga',
                         di, None),
                        ("Horaris", 'administracio__sincronitza__kronowin', di,
                         None),
                        ("Aules", 'gestio__aula__assignacomentari', di, None),
                        ("Reprograma",
                         'administracio__sincronitza__regenerar_horaris', di,
                         None),
                    ),
                ),
                ("Reset Passwd", 'administracio__professorat__reset_passwd',
                 di, None, None),
                ("Càrrega Inicial",
                 'administracio__configuracio__carrega_inicial', di, None,
                 None),
                ("Promocions", 'administracio__promocions__llista', di, None,
                 None),
                # ("Nou Alumne", 'administracio__alumnes__noualumne', di, None, None),
                # This screen's functionality is not implemented yet.
                # Pending: finish it, or remove it from the application.
            )),
        #--relacio_families--------------------------------------------------------------------------
        ('relacio_families', u'Famílies',
         'relacio_families__informe__el_meu_informe', al, None, (
             ("Informe", 'relacio_families__informe__el_meu_informe', al, None,
              None),
             ("Paràmetres", 'relacio_families__configuracio__canvi_parametres',
              al, None, None),
         )),
    )
    arbre2 = (
        #--Varis--------------------------------------------------------------------------
        ('varis', 'Ajuda i Avisos',
         'varis__about__about' if al else 'varis__elmur__veure', tots,
         nMissatges > 0, (
             ("Notificacions", 'varis__elmur__veure',
              di or pr or pl or co or pg,
              (nMissatgesDelta, 'info' if nMissatgesDelta < 10 else 'danger')
              if nMissatgesDelta > 0 else None, None),
             ("Missatge a professorat o PAS",
              'varis__prof_i_pas__envia_professors_i_pas', pr or pl or co,
              None, None),
             ("Avisos de Seguretat",
              'varis__avisos__envia_avis_administradors', tots, None, None),
             ("About", 'varis__about__about', tots, None, None),
         )),
    )
    arbreSortides = ()
    if hasattr(settings, 'CUSTOM_MODUL_SORTIDES_ACTIU'
               ) and settings.CUSTOM_MODUL_SORTIDES_ACTIU and (di or pr):
        filtre = []
        socEquipDirectiu = User.objects.filter(
            pk=user.pk, groups__name='direcció').exists()
        socCoordinador = User.objects.filter(pk=user.pk,
                                             groups__name__in=['sortides'
                                                               ]).exists()
        # Management team members only see outings in state 'R'
        # (reviewed by the coordinator).
        if socEquipDirectiu:
            filtre.append('R')
        # Outing coordinators only see outings in state 'P' (proposed).
        if socCoordinador:
            filtre.append('P')
        n_avis_sortides = (Sortida.objects.exclude(estat='E').filter(
            estat__in=filtre).distinct().count())
        n_avis_sortides_meves = (Sortida.objects.filter(estat='E').filter(
            professor_que_proposa__pk=user.pk).distinct().count())
        arbreSortides = (
            #--Varis--------------------------------------------------------------------------
            ('sortides', 'Activitats', 'sortides__meves__list', di or pr,
             n_avis_sortides + n_avis_sortides_meves > 0, (
                 (u"Històric", 'sortides__all__list', di or so, None, None),
                 (u"Gestió d'activitats", 'sortides__gestio__list', di or so, (
                     n_avis_sortides,
                     'info',
                 ) if n_avis_sortides > 0 else None, None),
                 (u"Les meves propostes d'activitats", 'sortides__meves__list',
                  pr, (
                      n_avis_sortides_meves,
                      'info',
                  ) if n_avis_sortides_meves > 0 else None, None),
             )),
        )
    arbre = arbre1 + arbreSortides + arbre2
    # Flatten the trees: top-level items always render; subitems and
    # subsubitems only render for the branch matching the current URL.
    for item_id, item_label, item_url, item_condicio, alerta, subitems in arbre:
        if not item_condicio:
            continue
        actiu = (menu_id == item_id)
        item = classebuida()
        item.label = item_label
        item.url = reverse(item_url)
        item.active = 'active' if actiu else ''
        item.alerta = alerta
        menu['items'].append(item)
        if actiu:
            for subitem_label, subitem_url, subitem__condicio, medalla, subsubitems in subitems:
                if not subitem__condicio:
                    continue
                actiu = (submenu_id == subitem_url.split('__')[1])
                subitem = classebuida()
                subitem.label = safe(subitem_label)
                subitem.url = reverse(subitem_url)
                subitem.active = 'active' if actiu else ''
                if medalla:
                    # Badge: (value, bootstrap style) shown next to the label.
                    omedalla = classebuida()
                    omedalla.valor = medalla[0]
                    omedalla.tipus = medalla[1]
                    subitem.medalla = omedalla
                menu['subitems'].append(subitem)
                subitem.subsubitems = []
                if subsubitems:
                    for subitem_label, subitem_url, subitem_condicio, subitem_medalla in subsubitems:
                        subsubitem = classebuida()
                        subsubitem.label = safe(subitem_label)
                        subsubitem.url = reverse(subitem_url)
                        if subitem_medalla:
                            omedalla = classebuida()
                            omedalla.valor = subitem_medalla[0]
                            omedalla.tipus = subitem_medalla[1]
                            subsubitem.medalla = omedalla
                        subitem.subsubitems.append(subsubitem)
                    if actiu and subsubmenu_id == 'blanc':
                        menu['subsubitems'] = subitem.subsubitems
    return menu
def ajax_report08(request):
    """JSON report: occurrence counts grouped by the attending public's name."""
    attended = Occurrence.objects.filter(attended_public__isnull=False)
    counts = attended.values_list('attended_public__name').annotate(Count('attended_public'))
    return JsonHttpResponse(list(counts))
def zhaoxiang_hotline_report_old(start, end,):
    """Legacy 12345-hotline report for department '20601' between *start* and *end*.

    Returns {'a1': ..., 'a2': ...}: `a1` counts accepted work orders per
    third-level responsible department (by discovery time); `a2` aggregates
    closed orders (by end time) with first-contact, resolution-type and
    satisfaction tallies per department.

    @start: 2018-06-01 00:00:00
    @end:   2018-06-30 23:59:59

    Reference SQL this ORM code mirrors:

    SELECT TASKID 任务号,
        TO_CHAR ( PERCREATETIME, 'yyyy-mm-dd hh24:mi' ) 受理时间,
        HOTLINESN 热线 12345工单编号,
        CITYGRID.F_TASK_ISAPPROACH ( APPROACH ) 热线 12345工单类型,
        CITYGRID.F_SMRX_HOTJS_TYPE_BYID ( taskid ) 热线 12345业务类型,
        CITYGRID.F_SMRX_HOTJS_SOURCE_BYID ( taskid ) 热线 12345工单来源,
        CITYGRID.F_SMRX_HOTJS_ISREPEAT ( taskid ) 热线 12345工单是否重复,
        CITYGRID.F_SMRX_HOTJS_REWPID ( taskid ) 热线 12345重复工单号,
        CITYGRID.F_REC_MAINDEPTNAME ( EXECUTEDEPTCODE, DEPTCODE, TASKID ) 主责部门,
        CITYGRID.F_REC_THREEDEPTNAME ( EXECUTEDEPTCODE, DEPTCODE, TASKID ) 三级主责部门,
        CASE ISFIRSTCONTACT WHEN 1 THEN '是' WHEN 0 THEN '否' WHEN 2 THEN '未评价' END 先行联系,
        ResultTypename_bf 解决情况,
        ALLMANYINAME_BF 综合满意度,
        CaseValuationName 结案评判
    FROM CITYGRID.T_TASKINFO main
    WHERE 1 = 1
        AND discovertime BETWEEN TO_DATE ( '2018-06-01 00:00:00', 'yyyy-MM-dd HH24:mi:ss' )
            AND TO_DATE ( '2018-06-30 23:59:59', 'yyyy-MM-dd HH24:mi:ss' )
        AND InfoSourceid IN ( 10, 68 )
        AND ( EXISTS ( SELECT 1 FROM CITYGRID.t_info_solving ts
                       WHERE ( ts.executedeptcode = '20601' OR ts.DeptCode= '20601' )
                       AND ts.taskid= main.taskid AND ts.status != 3 )
              OR main.deptcode = '20601')
    """
    """ 居委 村委 ratio * 5 """
    # Correlated subquery: a non-cancelled solving record handled by dept 20601.
    # (Variable name `sovle` is a historical typo for "solve", kept as-is.)
    sovle = TInfoSolving.objects.filter(
        taskid=OuterRef('pk')).filter(
            Q(executedeptcode='20601') | Q(deptcode='20601')).exclude(
                status=3).only('id')
    # a1: accepted orders in the window (by discovery time), counted per
    # third-level responsible department (DB function F_REC_THREEDEPTNAME).
    q1 = TTaskinfo.objects.filter(discovertime__gte=start, discovertime__lte=end,).filter(infosourceid__in=[10, 68],)\
        .annotate(is_ok=Exists(sovle))\
        .filter(Q(deptcode='20601') | Q(is_ok=True))
    q1 = q1.annotate(three=Func(F('executedeptcode'), F('deptcode'), F('taskid'), function='F_REC_THREEDEPTNAME'))
    a1 = q1.values('three').annotate(shou_li=Count('three'))
    # a2: closed orders in the window (by end time), same scoping.
    q2 = TTaskinfo.objects.filter(endtime__gte=start, endtime__lte=end,).filter(infosourceid__in=[10, 68],)\
        .annotate(is_ok=Exists(sovle))\
        .filter(Q(deptcode='20601') | Q(is_ok=True))
    q2 = q2.annotate(three=Func(F('executedeptcode'), F('deptcode'), F('taskid'), function='F_REC_THREEDEPTNAME'))
    a2_1 = q2.values('three').annotate(sou_count=Count('three'))
    # First-contact tallies (isfirstcontact: 1 = yes, 0 = no).
    a2_2 = a2_1.annotate(first_yes=Sum(Case(When(isfirstcontact=1, then=1), default=0, output_field=IntegerField()))) \
        .annotate(first_no=Sum(Case(When(isfirstcontact=0, then=1), default=0, output_field=IntegerField())))
    # .annotate(first_total = F('first_yes')+F('first_no'))\
    # .annotate(first_ratio=Case(When(first_total=0, then=1),default=F('first_yes')/F('first_total'),output_field=FloatField() ))
    # Resolution-type tallies (case valuation: actually solved vs explained).
    a2_4 = a2_2.annotate(real_solve=Sum(Case(When(casevaluationname='实际解决', then=1), default=0, output_field=IntegerField())))\
        .annotate(jie_solve=Sum(Case(When(casevaluationname='解释说明', then=1), default=0, output_field=IntegerField())))
    # .annotate(total_solve=F('real_solve')+F('jie_solve'))\
    # .annotate(real_solve_ratio=Case(When(total_solve=0,then=0),default= F('real_solve')/ F('total_solve'),output_field=FloatField() ))\
    # .annotate(jie_solve_ratio=Case(When(total_solve=0,then=0),default= F('jie_solve')/ F('total_solve'),output_field=FloatField() ))
    # Weighted satisfaction score (satisfied=1, mostly=0.8, neutral=0.6)
    # plus the count of rated orders.
    a2_5 = a2_4.annotate(man_yi=Sum(Case(When(allmanyiname_bf='满意', then=1),
                                         When(allmanyiname_bf='基本满意', then=0.8),
                                         When(allmanyiname_bf='一般', then=0.6),
                                         default=0, output_field=FloatField())))\
        .annotate(man_yi_total=Sum(Case(When(allmanyiname_bf__in=['满意', '基本满意', '一般', '不满意'], then=1), default=0, output_field=IntegerField())))
    # .annotate(man_yi_ratio = Case(When(man_yi_total=0,then=0),default= F('man_yi')/F('man_yi_total') , output_field=FloatField() ))
    a1 = list(a1)
    a2 = list(a2_5)
    out_dict = {
        'a1': a1,
        'a2': a2
    }
    return out_dict
def show_categories(context):
    """Template helper: categories that have at least one post."""
    categories = Category.objects.annotate(num_posts=Count('post'))
    return {'category_list': categories.filter(num_posts__gt=0)}
def get_attrs(self, item_list, user):
    """Collect detailed per-project attributes for serialization.

    Extends the base serializer's attrs with, for every project in
    *item_list*: registered platforms, processing-issue count, latest
    release, project options, and the serialized organization.
    """
    attrs = super(DetailedProjectSerializer, self).get_attrs(item_list, user)
    project_ids = [i.id for i in item_list]
    # Platforms registered per project, grouped into a project_id -> [platform] map.
    platforms = ProjectPlatform.objects.filter(
        project_id__in=project_ids,
    ).values_list('project_id', 'platform')
    platforms_by_project = defaultdict(list)
    for project_id, platform in platforms:
        platforms_by_project[project_id].append(platform)
    # Processing-issue counts per project.
    num_issues_projects = Project.objects.filter(
        id__in=project_ids
    ).annotate(num_issues=Count('processingissue')) \
        .values_list('id', 'num_issues')
    processing_issues_by_project = {}
    for project_id, num_issues in num_issues_projects:
        processing_issues_by_project[project_id] = num_issues
    # Latest release per project via a correlated subquery: pick the release
    # with the newest date_released (falling back to date_added). The ids are
    # integers from our own ORM objects, so the format() interpolation is not
    # an injection vector.
    latest_release_list = list(
        Release.objects.raw("""
        SELECT lr.project_id as actual_project_id, r.*
        FROM (
            SELECT (
                SELECT lrr.id
                FROM sentry_release lrr
                JOIN sentry_release_project lrp
                ON lrp.release_id = lrr.id
                WHERE lrp.project_id = p.id
                ORDER BY COALESCE(lrr.date_released, lrr.date_added) DESC
                LIMIT 1
            ) as release_id,
            p.id as project_id
            FROM sentry_project p
            WHERE p.id IN ({})
        ) as lr
        JOIN sentry_release r
        ON r.id = lr.release_id
    """.format(', '.join(six.text_type(i.id) for i in item_list), )))
    # Project options limited to the keys this serializer exposes.
    queryset = ProjectOption.objects.filter(
        project__in=item_list,
        key__in=self.OPTION_KEYS,
    )
    options_by_project = defaultdict(dict)
    for option in queryset.iterator():
        options_by_project[option.project_id][option.key] = option.value
    # Serialize each distinct organization once and key it by id.
    orgs = {
        d['id']: d
        for d in serialize(list(set(i.organization for i in item_list)), user)
    }
    # zip relies on serialize() preserving input order for the releases.
    latest_releases = {
        r.actual_project_id: d
        for r, d in zip(latest_release_list,
                        serialize(latest_release_list, user))
    }
    for item in item_list:
        attrs[item].update({
            'latest_release': latest_releases.get(item.id),
            'org': orgs[six.text_type(item.organization_id)],
            'options': options_by_project[item.id],
            'platforms': platforms_by_project[item.id],
            'processing_issues': processing_issues_by_project.get(item.id, 0),
        })
    return attrs
def get_tags():
    """Tags used by at least one post."""
    used = Tag.objects.annotate(num_posts=Count('post'))
    return used.filter(num_posts__gt=0)
def get_tags_with_no_tagged_items(self):
    """Return tags that tag neither a blog nor a post.

    The sum of the two relation counts is zero exactly when both are zero,
    so filtering the combined annotation on 0 finds fully-unused tags.
    """
    unused = self.get_queryset().annotate(
        tagged_items=Count("taggedblog") + Count("taggedpost")
    )
    return unused.filter(tagged_items=0)