Code example #1
File: views.py Project: webwin0/blog
def archive(request):

    base_qs = Entry.objects.filter(published=True)

    in_archive = base_qs.aggregate(count=Count('pk'), max=Max('pk'), min=Min('pk'), avg=Avg('pk'))

    # connection.ops exposes the backend-specific SQL helpers; date_trunc_sql
    # truncates 'created' to the month so entries can be grouped per month.
    month = connection.ops.date_trunc_sql('month', 'created')
    per_month_count = base_qs.extra(select={'date': month}).values('date').annotate(count=Count('pk')).order_by('date')

    ctx = {
        'in_archive': in_archive,
        'per_month_count': per_month_count,
    }

    return render_to_response('homepage/archive.html', ctx, context_instance=RequestContext(request))
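Note: aggregate() returns a plain dict keyed by the aliases passed to it, while the values()/annotate() chain yields one row per month. A sketch of the shapes (values are illustrative, not taken from the project):

# in_archive == {'count': 42, 'max': 97, 'min': 3, 'avg': 51.2}
# per_month_count iterates as rows like {'date': ..., 'count': 7},
# one row per month, ordered by month.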
Code example #2
def get_awc_reports_pse(config, month, domain, show_test=False):
    selected_month = datetime(*month)
    last_30_days = (selected_month - relativedelta(days=30))
    last_months = (selected_month - relativedelta(months=1))
    last_three_months = (selected_month - relativedelta(months=3))
    last_day_of_selected_month = (
        selected_month + relativedelta(months=1)) - relativedelta(days=1)

    map_image_data = DailyAttendanceView.objects.filter(
        pse_date__range=(selected_month, last_day_of_selected_month),
        **config).values('awc_name', 'form_location_lat', 'form_location_long',
                         'image_name', 'doc_id',
                         'pse_date').order_by('-pse_date')

    kpi_data_tm = AggAwcMonthly.objects.filter(
        month=selected_month,
        **config).values('awc_name').annotate(days_open=Sum('awc_days_open'))
    kpi_data_lm = AggAwcMonthly.objects.filter(
        month=last_months,
        **config).values('awc_name').annotate(days_open=Sum('awc_days_open'))

    open_count_data = DailyAttendanceView.objects.filter(
        pse_date__range=(last_three_months, last_day_of_selected_month),
        **config).values('awc_name', 'pse_date').annotate(
            open_count=Sum('awc_open_count'), ).order_by('pse_date')

    daily_attendance = DailyAttendanceView.objects.filter(
        pse_date__range=(selected_month, last_day_of_selected_month),
        **config).values('awc_name', 'pse_date').annotate(
            avg_percent=Avg('attended_children_percent'),
            attended=Sum('attended_children'),
            eligible=Sum('eligible_children'))

    if not show_test:
        map_image_data = apply_exclude(domain, map_image_data)
        kpi_data_tm = apply_exclude(domain, kpi_data_tm)
        kpi_data_lm = apply_exclude(domain, kpi_data_lm)
        open_count_data = apply_exclude(domain, open_count_data)
        daily_attendance = apply_exclude(domain, daily_attendance)

    attended_children_chart = {}
    dates = [
        dt for dt in rrule(
            DAILY, dtstart=selected_month, until=last_day_of_selected_month)
    ]
    for date in dates:
        attended_children_chart[int(date.strftime("%s")) * 1000] = {
            'avg_percent': 0,
            'attended': 0,
            'eligible': 0
        }

    open_count_chart = {}
    for chart_row in open_count_data:
        first_day_of_week = chart_row['pse_date'] - timedelta(
            days=chart_row['pse_date'].isoweekday() - 1)
        pse_week = int(first_day_of_week.strftime("%s")) * 1000

        if pse_week in open_count_chart:
            open_count_chart[pse_week] += (chart_row['open_count'] or 0)
        else:
            open_count_chart[pse_week] = (chart_row['open_count'] or 0)

    for daily_attendance_row in daily_attendance:
        pse_day = int(daily_attendance_row['pse_date'].strftime("%s")) * 1000
        attended_children_chart[pse_day] = {
            'avg_percent': daily_attendance_row['avg_percent'] or 0,
            'attended': daily_attendance_row['attended'] or 0,
            'eligible': daily_attendance_row['eligible'] or 0
        }

    map_data = {}

    date_to_image_data = {}

    for map_row in map_image_data:
        lat = map_row['form_location_lat']
        long = map_row['form_location_long']
        awc_name = map_row['awc_name']
        image_name = map_row['image_name']
        doc_id = map_row['doc_id']
        pse_date = map_row['pse_date']
        if lat and long:
            key = doc_id.replace('-', '')
            map_data.update({
                key: {
                    'lat': float(lat),
                    'lng': float(long),
                    'focus': 'true',
                    'message': awc_name,
                }
            })
        if image_name:
            date_str = pse_date.strftime("%d/%m/%Y")
            date_to_image_data[date_str] = map_row

    images = []
    tmp_image = []

    for idx, date in enumerate(
            rrule(DAILY,
                  dtstart=selected_month,
                  until=last_day_of_selected_month)):
        date_str = date.strftime("%d/%m/%Y")
        image_data = date_to_image_data.get(date_str)

        if image_data:
            image_name = image_data['image_name']
            doc_id = image_data['doc_id']

            tmp_image.append({
                'id': idx,
                'image': absolute_reverse('api_form_attachment',
                                          args=(domain, doc_id, image_name)),
                'date': date_str,
            })
        else:
            tmp_image.append({'id': idx, 'image': None, 'date': date_str})

        if (idx + 1) % 4 == 0:
            images.append(tmp_image)
            tmp_image = []

    if tmp_image:
        images.append(tmp_image)

    return {
        'kpi': [[{
            'label': _('AWC Days Open'),
            'help_text': _(
                "Total number of days the AWC is open in the given month. "
                "The AWC is expected to be open 6 days a week "
                "(Not on Sundays and public holidays)"
            ),
            'percent': percent_increase('days_open', kpi_data_tm, kpi_data_lm),
            'value': get_value(kpi_data_tm, 'days_open'),
            'all': '',
            'format': 'number',
            'frequency': 'month',
            'color': 'green' if percent_increase(
                'days_open', kpi_data_tm, kpi_data_lm) > 0 else 'red',
        }]],
        'charts': [
            [{
                'key': 'AWC Days Open per week',
                'values': sorted(
                    [dict(x=x_val, y=y_val)
                     for x_val, y_val in open_count_chart.items()],
                    key=lambda d: d['x']),
                "strokeWidth": 2,
                "classed": "dashed",
                "color": BLUE,
            }],
            [{
                'key': 'PSE - Daily Attendance',
                'values': sorted(
                    [dict(x=x_val,
                          y=y_val['avg_percent'],
                          attended=y_val['attended'],
                          eligible=y_val['eligible'])
                     for x_val, y_val in attended_children_chart.items()],
                    key=lambda d: d['x']),
                "strokeWidth": 2,
                "classed": "dashed",
                "color": BLUE,
            }],
        ],
        'map': {
            'markers': map_data,
        },
        'images': images,
    }
Code example #3
 def calculate_reputation(self):
     reputation = self.implementation_set.aggregate(
         average=Avg('reputation'))['average'] or None
     self.reputation = reputation
     self.save()
Code example #4
 def average_rating(self, product_id):
     reviews = self.reviews(product_id)
     return reviews.aggregate(Avg('rating'))
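Since Avg('rating') is passed without an alias, the result dict uses Django's default key 'rating__avg'. A hypothetical caller (the 'service' name is an assumption, not from the source):

# rating__avg is None when the product has no reviews.
avg = service.average_rating(42)['rating__avg']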
Code example #5
File: models.py Project: somagh/gatuino-scoreboard
 def score(self):
     return self.assignees.aggregate(score=Avg('score'))['score'] or 0
Code example #6
    def generate_public_stats(self, output_html: str) -> None:

        stats_dict = {
            "n_archives": Archive.objects.filter(public=True).count(),
            "n_galleries": Gallery.objects.filter(public=True).count(),
            "archive": Archive.objects.filter(public=True).filter(
                filesize__gt=0).aggregate(
                    Avg('filesize'), Max('filesize'), Min('filesize'),
                    Sum('filesize'), Avg('filecount'), Sum('filecount')),
            "gallery": Gallery.objects.filter(public=True).filter(
                filesize__gt=0).aggregate(
                    Avg('filesize'), Max('filesize'), Min('filesize'),
                    Sum('filesize'), Avg('filecount'), Sum('filecount')),
            "n_tags": Tag.objects.filter(
                gallery__public=True).distinct().count(),
            "top_10_tags": Tag.objects.filter(
                gallery__public=True).distinct().annotate(
                    num_archive=Count('gallery')).order_by('-num_archive')[:10],
            "top_10_artist_tags": Tag.objects.filter(
                scope='artist', gallery__public=True).distinct().annotate(
                    num_archive=Count('gallery')).order_by('-num_archive')[:10],
            "top_10_parody_tags": Tag.objects.filter(
                scope='parody', gallery__public=True).distinct().annotate(
                    num_archive=Count('gallery')).order_by('-num_archive')[:10],
        }

        # Per provider
        providers = Gallery.objects.filter(public=True).values_list(
            'provider', flat=True).distinct()

        providers_dict = {}

        for provider in providers:
            providers_dict[provider] = {
                'n_galleries': Gallery.objects.filter(
                    public=True, provider=provider).count(),
                'gallery': Gallery.objects.filter(public=True).filter(
                    filesize__gt=0, provider=provider).aggregate(
                        Avg('filesize'), Max('filesize'), Min('filesize'),
                        Sum('filesize'), Avg('filecount'), Sum('filecount')),
            }

        # Per category
        categories = Gallery.objects.filter(public=True).values_list(
            'category', flat=True).distinct()

        categories_dict = {}

        for category in categories:
            categories_dict[category] = {
                'n_galleries': Gallery.objects.filter(
                    public=True, category=category).count(),
                'gallery': Gallery.objects.filter(public=True).filter(
                    filesize__gt=0, category=category).aggregate(
                        Avg('filesize'), Max('filesize'), Min('filesize'),
                        Sum('filesize'), Avg('filecount'), Sum('filecount')),
            }

        # Per language tag
        languages = Tag.objects.filter(scope='language').exclude(
            scope='language',
            name='translated').annotate(num_gallery=Count('gallery')).order_by(
                '-num_gallery').values_list('name', flat=True).distinct()

        languages_dict = {}

        languages_dict['untranslated'] = {
            'n_galleries': Gallery.objects.filter(public=True).exclude(
                tags__scope='language').distinct().count(),
            # Match n_galleries above: aggregate over galleries that have
            # no language tag.
            'gallery': Gallery.objects.filter(public=True).filter(
                filesize__gt=0).exclude(
                    tags__scope='language').distinct().aggregate(
                        Avg('filesize'), Max('filesize'), Min('filesize'),
                        Sum('filesize'), Avg('filecount'), Sum('filecount')),
        }

        for language in languages:
            languages_dict[language] = {
                'n_galleries':
                Gallery.objects.filter(public=True).filter(
                    tags__scope='language',
                    tags__name=language).distinct().count(),
                'gallery':
                Gallery.objects.filter(public=True).filter(
                    filesize__gt=0,
                    tags__scope='language',
                    tags__name=language).distinct().aggregate(
                        Avg('filesize'), Max('filesize'), Min('filesize'),
                        Sum('filesize'), Avg('filecount'), Sum('filecount'))
            }

        d = {
            'stats': stats_dict,
            'gallery_categories': categories_dict,
            'gallery_languages': languages_dict,
            'gallery_providers': providers_dict
        }

        content = render_to_string("viewer/static_public_stats.html", d)

        # TODO: User argument
        content = content.replace("/meta/static/",
                                  "https://static.chaika.moe/static/")
        content = content.replace("/meta/", "/")
        content = content.replace("output.4d110389f894.css",
                                  "output.c5266df505ca.css")

        with open(output_html, 'w', encoding='utf8') as static_file:
            static_file.write(content)
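Each unaliased aggregate above lands in the result dict under a generated '<field>__<function>' key, so the "archive" and "gallery" entries have this shape (values illustrative):

# {'filesize__avg': ..., 'filesize__max': ..., 'filesize__min': ...,
#  'filesize__sum': ..., 'filecount__avg': ..., 'filecount__sum': ...}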
Code example #7
File: __init__.py Project: adelsonllima/djangoplus
    def _calculate(self,
                   vertical_key=None,
                   horizontal_key=None,
                   aggregate=None):
        verbose_name = get_metadata(self.model, 'verbose_name')
        if not vertical_key:
            if aggregate:
                value = 0
                mode, attr = aggregate
                if mode == 'sum':
                    value = self.aggregate(Sum(attr)).get(
                        '{}__sum'.format(attr)) or 0
                elif mode == 'avg':
                    value = self.aggregate(Avg(attr)).get(
                        '{}__avg'.format(attr)) or 0
                aggregation_field = get_field(self.model, attr)
                if type(aggregation_field).__name__ in ('DecimalField', ):
                    value = Decimal(value)
                return value
            return None
        vertical_field = get_field(self.model, vertical_key)
        if type(vertical_field).__name__ in ('DateField', 'DateTimeField'):
            months = [
                'Jan', 'Fev', 'Mar', 'Abr', 'Mai', 'Jun', 'Jul', 'Ago', 'Set',
                'Out', 'Nov', 'Dez'
            ]

            if horizontal_key:
                iterator_model = get_field(self.model,
                                           horizontal_key).remote_field.model
                iterators = iterator_model.objects.filter(
                    pk__in=self.values_list(horizontal_key, flat=True).
                    order_by(horizontal_key).distinct())
                horizontal_field = get_field(self.model, horizontal_key)
                title = '{} anual por {}'.format(verbose_name,
                                                 horizontal_field.verbose_name)
                statistics = QueryStatistics(title)
                for iterator in iterators:
                    group = str(iterator)
                    for i, month in enumerate(months):
                        label = month
                        qs = self.filter(
                            **{
                                '{}__month'.format(vertical_key): i + 1,
                                horizontal_key: iterator.pk
                            })
                        statistics.add(label, qs, qs.count(), group)
                return statistics
            else:
                title = '{} Anual'.format(verbose_name)
                statistics = QueryStatistics(title)
                for i, month in enumerate(months):
                    label = month
                    avg = False
                    if aggregate:
                        total = 0
                        mode, attr = aggregate
                        if mode == 'sum':
                            qs = self.filter(
                                **{'{}__month'.format(vertical_key): i + 1})
                            total = qs.aggregate(Sum(attr)).get(
                                '{}__sum'.format(attr)) or 0
                        elif mode == 'avg':
                            avg = True
                            qs = self.filter(
                                **{'{}__month'.format(vertical_key): i + 1})
                            total = qs.aggregate(Avg(attr)).get(
                                '{}__avg'.format(attr)) or 0
                        aggregation_field = get_field(self.model, attr)
                        if type(aggregation_field).__name__ in (
                                'DecimalField', ):
                            total = Decimal(total)
                    else:
                        qs = self.filter(
                            **{'{}__month'.format(vertical_key): i + 1})
                        total = qs.count()
                    statistics.add(label, qs, total, avg=avg)
                return statistics
        else:
            if vertical_field.choices:
                used_choices = self.values_list(
                    vertical_key, flat=True).order_by(vertical_key).distinct()
                vertical_choices = []
                for choice in vertical_field.choices:
                    if choice[0] in used_choices:
                        vertical_choices.append(choice)
            elif vertical_field.__class__.__name__ == 'BooleanField':
                vertical_choices = [(True, 'Sim'), (False, 'Não')]
            else:
                vertical_model = find_model(self.model, vertical_key)
                if vertical_model == self.model:
                    vertical_choices = [(getattr(o, vertical_key), str(o))
                                        for o in vertical_model.objects.all()]
                else:
                    vertical_choices = [
                        (o.pk, str(o)) for o in vertical_model.objects.filter(
                            id__in=self.values_list(vertical_key, flat=True))
                    ]
            if horizontal_key:
                horizontal_choices = []
                horizontal_field = get_field(self.model, horizontal_key)
                if horizontal_field.choices:
                    used_choices = self.values_list(
                        horizontal_key,
                        flat=True).order_by(horizontal_key).distinct()
                    horizontal_choices = []
                    for choice in horizontal_field.choices:
                        if choice[0] in used_choices:
                            horizontal_choices.append(choice)
                elif horizontal_field.__class__.__name__ == 'BooleanField':
                    horizontal_choices = [(True, 'Sim'), (False, 'Não')]
                else:
                    horizontal_model = find_model(self.model, horizontal_key)
                    horizontal_choices = [
                        (o.pk, str(o))
                        for o in horizontal_model.objects.filter(
                            id__in=self.values_list(horizontal_key, flat=True))
                    ]

                title = '{} por {} e {}'.format(
                    verbose_name, vertical_field.verbose_name.lower(),
                    horizontal_field.verbose_name)
                statistics = QueryStatistics(title)
                for vertical_choice in vertical_choices:
                    group = vertical_choice[1]
                    avg = False
                    for horizontal_choice in horizontal_choices:
                        label = horizontal_choice[1]
                        value = 0
                        lookup = {
                            vertical_key: vertical_choice[0] or None,
                            horizontal_key: horizontal_choice[0] or None
                        }
                        qs = self.filter(**lookup).distinct()
                        if aggregate:
                            mode, attr = aggregate
                            if mode == 'sum':
                                value = qs.aggregate(Sum(attr)).get(
                                    '{}__sum'.format(attr)) or 0
                            elif mode == 'avg':
                                avg = True
                                value = qs.aggregate(Avg(attr)).get(
                                    '{}__avg'.format(attr)) or 0
                            aggregation_field = get_field(self.model, attr)
                            if type(aggregation_field).__name__ in (
                                    'DecimalField', ):
                                value = Decimal(value)
                        else:
                            value = qs.values('id').count()
                        statistics.add(label, qs, value, group, avg=avg)
                return statistics
            else:
                title = '{} por {}'.format(verbose_name,
                                           vertical_field.verbose_name)
                statistics = QueryStatistics(title)
                avg = False
                for vertical_choice in vertical_choices:
                    label = vertical_choice[1]
                    lookup = {vertical_key: vertical_choice[0] or None}
                    value = 0
                    qs = self.filter(**lookup).distinct()
                    if aggregate:
                        mode, attr = aggregate
                        if mode == 'sum':
                            value = qs.aggregate(Sum(attr)).get(
                                '{}__sum'.format(attr)) or 0
                        elif mode == 'avg':
                            avg = True
                            value = qs.aggregate(Avg(attr)).get(
                                '{}__avg'.format(attr)) or 0
                        aggregation_field = get_field(self.model, attr)
                        if type(aggregation_field).__name__ in (
                                'DecimalField', ):
                            value = Decimal(value)
                    else:
                        value = qs.count()
                    statistics.add(label, qs, value, avg=avg)
                return statistics
Code example #8
 def get_group_with_student_count(self):
     return self.get_queryset().annotate(stq=Count('student'),
                                         stavg=Avg('student__age'),
                                         stmax=Max('student__age'),
                                         stmin=Min('student__age'))
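Assuming this method lives on the Group model's default manager (the excerpt only shows self.get_queryset()), each returned group carries the annotations as attributes. A hedged usage sketch:

# 'Group' and the manager attachment are assumptions.
for group in Group.objects.get_group_with_student_count():
    print(group.pk, group.stq, group.stavg, group.stmax, group.stmin)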
Code example #9
 def func(obj):
     return CriterionResponse.objects.filter(
         evaluation__event=obj,
         criterion=get_criterion(item),
     ).aggregate(avg=Avg("response"))["avg"]
Code example #10
File: views.py Project: janhavitanna/Repovey
def analyse(request, view_key=""):
    try:
        survey = Survey.objects.get(key=view_key)
    except Survey.DoesNotExist:
        return error_jump(request)
    if not survey.user.id == request.user.id and not survey.is_collaborator(
            request.user):
        return error_jump(request, "unauthorized")

    questions = survey.questions.order_by('id_in_survey')
    raw_data = []

    geo_data = []
    geo_dict = {}
    gi = pygeoip.GeoIP(settings.GEO_DATA_PATH, pygeoip.MEMORY_CACHE)

    responses = Response.objects.filter(survey=survey)
    # how many people skipped each question marked as not required
    no_skipped = []

    respondents_data = []
    time_data = {}
    time_in_seconds = []
    date_dict = {}
    # analyse geoip data of responses

    for response in responses:
        date_str = response.dt_end.strftime('%d %B')
        time_in_seconds.append((response.dt_end - response.dt_start).seconds)
        if date_str not in date_dict:
            date_dict[date_str] = 1
        else:
            date_dict[date_str] += 1
        ip = str(response.ip_address)
        country = gi.country_name_by_addr(ip)
        if country not in geo_dict:
            geo_dict[country] = 1
        else:
            geo_dict[country] += 1
    if time_in_seconds:
        time_data['seconds'] = time_in_seconds
        time_data['min'] = min(time_in_seconds)
        time_data['max'] = max(time_in_seconds)
        time_data['avg'] = sum(time_in_seconds) / len(time_in_seconds)

    # analyse number of daily responses
    for key, value in sorted(date_dict.items(), cmp=date_cmp):
        respondents_data.append([key, value])

    for key, value in geo_dict.items():
        geo_data.append([key, value])

    # analyse each question
    for question in questions:
        type = question.type
        answers = Answer.objects.filter(response__survey=survey,
                                        id_in_response=question.id_in_survey)
        # calc number of skipped responses for this question
        if not question.is_required:
            no_skipped.append(answers.filter(value__exact='').count())
        else:
            no_skipped.append(0)
            # after counting skipped questions, excluding all empty answers
        answers = answers.exclude(value__exact='')

        if type == 'paragraph' or type == 'text':
            # for paragraph and text question, only display all responses
            raw_data.append([answer.value for answer in answers])

        elif type == 'multiplechoice' or type == 'checkbox':
            # get choices
            if type == 'multiplechoice':
                choices = MultipleChoice.objects.filter(question=question)
            else:
                choices = CheckboxChoice.objects.filter(question=question)

            # init list to [0,0,0,0,0...]
            resp_count = [0] * len(choices)
            # calc number of responses for each choice
            for answer in answers:
                for choice in answer.value.split(','):
                    resp_count[int(choice)] += 1

            data_dict = []
            for choice, count in zip(choices, resp_count):
                data_dict.append([str(choice.label), int(count)])

            raw_data.append(data_dict)
        elif type == 'numeric':
            num_dict = {}
            num_dict['data'] = [float(answer.value) for answer in answers]
            num_dict['min_value'] = answers.aggregate(
                Min('value'))['value__min']
            num_dict['max_value'] = answers.aggregate(
                Max('value'))['value__max']
            num_dict['avg'] = answers.aggregate(Avg('value'))['value__avg']
            raw_data.append(num_dict)

        elif type == 'scale':
            data_dict = {}
            for answer in answers:
                if answer.value not in data_dict:
                    data_dict[answer.value] = 1
                else:
                    data_dict[answer.value] += 1
            tmp_list = []
            # sort according to key
            for key, value in sorted(data_dict.items(), cmp=string_cmp):
                tmp_list.append([str(key), float(value)])
            raw_data.append(tmp_list)
        elif type == 'date':
            raw_data.append([str(answer.value) for answer in answers])

    zipped = zip(questions, raw_data, no_skipped)

    context = {
        'survey': survey,
        'questions': questions,
        'dt_start': datetime.now(),
        'responses': responses,
        'zipped': zipped,
        'time_data': time_data,
        'respondents_data': respondents_data,
        'geo_data': geo_data
    }
    return render_to_response('analyse.html', context, RequestContext(request))
Code example #11
    def get_context_data(self, **kwargs):
        context = super(DashboardView, self).get_context_data(**kwargs)
        # context['verbose_name'] = 'Dashboard'

        day_count = 10
        xdata_line = []
        days = day_count
        while days >= 0:
            xdata_line.append(
                int(
                    time.mktime(
                        (datetime.datetime.now() -
                         datetime.timedelta(days=days)).timetuple()) * 1000))
            days -= 1
        tooltip_date_line = "%d %b %Y %H:%M:%S %p"
        extra_serie_line = {
            "tooltip": {
                "y_start": "",
                "y_end": ""
            },
            "date_format": tooltip_date_line
        }

        chartdata_sensor = {'x': xdata_line}
        chartseq_sensor = 1
        for sensor_type in PlantBase.sensor_type:
            if sensor_type[0] is not None:
                chartdata_sensor['name%s' % (chartseq_sensor)] = sensor_type[1]
                chartdata_sensor['extra%s' %
                                 (chartseq_sensor)] = extra_serie_line
                ydata_sensor = []
                days = day_count
                while days >= 0:
                    last_day = (datetime.datetime.now() -
                                datetime.timedelta(days=days)).strftime('%Y-%m-%d')
                    val = PlantSensorLogDetail.objects.filter(
                        kode=sensor_type[0],
                        plant_sensor_log__dt__range=('%s 00:00:00' % last_day,
                                                     '%s 23:59:59' % last_day),
                    ).aggregate(Avg('val'))
                    ydata_sensor.append(val['val__avg'] or 0.0)
                    days -= 1
                chartdata_sensor['y%s' % (chartseq_sensor)] = ydata_sensor
                chartseq_sensor += 1

        context['charttype_sensor'] = 'lineChart'
        context['chartdata_sensor'] = chartdata_sensor
        context['extra_sensor'] = {
            'x_is_date': True,
            'x_axis_format': '%d/%b/%y',
        }

        xdata_plant = []
        ydata_plant = []
        extra_serie_plant = {"tooltip": {"y_start": "", "y_end": ""}}
        plant_types = PlantPlant.objects.filter(active=True)
        valAll = PlantRackPoint.objects.aggregate(Count('plant_plant'))
        for plant_type in plant_types:
            val = PlantRackPoint.objects.filter(
                plant_plant=plant_type).aggregate(Count('plant_plant'))
            if val['plant_plant__count']:
                xdata_plant.append("%s (%s/%s)" %
                                   (plant_type.kode, val['plant_plant__count'],
                                    valAll['plant_plant__count']))
                ydata_plant.append(val['plant_plant__count'])
        chartdata_plant = {
            'x': xdata_plant,
            'y1': ydata_plant,
            'extra1': extra_serie_plant
        }

        context['charttype_plant'] = 'pieChart'
        context['chartdata_plant'] = chartdata_plant

        return context
Code example #12
def average_rate(card) -> float:
    # aggregate() returns {'rate__avg': None} on an empty queryset,
    # so fall back to 0 to match the declared float return type.
    return card.card_rate.all().aggregate(Avg('rate'))['rate__avg'] or 0
Code example #13
class DonationAggViewSet(ListModelMixin, GenericViewSet):
    queryset = (Donation.objects.select_related('organization',
                                                'donator').all())
    serializer_class = DonationAggSerializer
    GROUP_FIELDS = {
        'wojewodztwo': 'organization__wojewodztwo',
        'powiat': 'organization__powiat',
        'gmina': 'organization__gmina',
        'city': 'organization__city',
        'donator': 'donator__name',
        'date': 'date',
    }
    NON_F_FIELDS = ['date']
    AGG_FIELDS = {
        'sum_money': Sum('money'),
        'avg_money': Avg('money'),
        'min_money': Min('money'),
        'max_money': Max('money'),
        'count_donations': Count('id'),
    }
    ALL_FIELDS = GROUP_FIELDS.copy()
    ALL_FIELDS.update({key: key for key in AGG_FIELDS.keys()})

    def get_group_fields(self):
        group = self.request.query_params.get('group', '')
        stripped_fields = [field.strip() for field in group.split(',')]
        fields = {
            field: self.ALL_FIELDS.get(field)
            for field in stripped_fields
        }
        return {
            key: value
            for key, value in fields.items() if value is not None
        }

    def get_ordering_fields(self):
        ordering = self.request.query_params.get('ordering', '')
        stripped_fields = [field.strip() for field in ordering.split(',')]
        fields = []
        for field in stripped_fields:
            # Skip unknown fields instead of crashing on '-' + None.
            desc = field.startswith('-')
            mapped = self.ALL_FIELDS.get(field[1:] if desc else field)
            if mapped:
                fields.append('-' + mapped if desc else mapped)
        return fields

    def get_filter_fields(self):
        params = self.request.query_params
        fields = {
            self.ALL_FIELDS.get(field.strip()): value
            for field, value in params.items()
        }
        return {key: value for key, value in fields.items() if key is not None}

    def get_queryset(self):
        fields = self.get_group_fields()
        annotate_fields = {name: F(key) for name, key in fields.items()}
        aliased_fields = {
            name: key
            for name, key in annotate_fields.items()
            if name not in self.NON_F_FIELDS
        }
        return (super(
            DonationAggViewSet,
            self).get_queryset().annotate(**aliased_fields).values(
                *annotate_fields.keys()).annotate(**self.AGG_FIELDS).order_by(
                    *self.get_ordering_fields()).filter(
                        **self.get_filter_fields()))

    def get_serializer(self, *args, **kwargs):
        kwargs = kwargs.copy()
        # Lists, so the concatenation also works on Python 3 dict views.
        kwargs['fields'] = (list(self.get_group_fields().keys()) +
                            list(self.AGG_FIELDS.keys()))
        return super(DonationAggViewSet, self).get_serializer(*args, **kwargs)
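A hypothetical request against this viewset (the URL prefix is an assumption; the parameters map onto the fields defined above):

# GET /donations/?group=wojewodztwo,date&ordering=-sum_money&city=Warszawa
# 'group' picks GROUP_FIELDS entries to group by, 'ordering' may reference
# any ALL_FIELDS entry ('-' prefix for descending), and any remaining
# recognised query params become queryset filters via get_filter_fields().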
Code example #14
 def rating(self):
     avg_score = Review.objects.filter(title=self).aggregate(
         rating=Avg('score'))
     rating = avg_score['rating']
     return rating
Code example #15
File: views.py Project: kate-ka/itea_django
def update_product_rating(instance, **kwargs):
    instance.product.rating = instance.product.product_reviews.aggregate(
        average_rating=Avg('rating'))['average_rating']
    instance.product.save()
Code example #16
    def _ledgers_per_case(self):
        db_name = get_db_aliases_for_partitioned_query()[
            0]  # just query one shard DB
        results = (LedgerValue.objects.using(db_name).filter(
            domain=self.domain).values('case_id').annotate(
                ledger_count=Count('pk')))[:100]

        case_ids = set()
        ledger_count = 0
        for result in results:
            case_ids.add(result['case_id'])
            ledger_count += result['ledger_count']

        if not case_ids:
            self.stdout.write("Domain has no ledgers")
            return

        avg_ledgers_per_case = ledger_count / len(case_ids)
        case_types_result = CaseES(for_export=True)\
            .domain(self.domain).case_ids(case_ids)\
            .aggregation(TermsAggregation('types', 'type.exact'))\
            .size(0).run()

        case_types = case_types_result.aggregations.types.keys

        self.stdout.write('\nCase Types with Ledgers')
        for type_ in case_types:
            self._print_value(
                'case_type', type_,
                CaseES().domain(self.domain).case_type(type_).count())
            db_name = get_db_aliases_for_partitioned_query()[
                0]  # just query one shard DB
            # aggregate() collapses the annotated lifespans into one value;
            # values() does not accept flat=True (that is a values_list option).
            result = (CommCareCase.objects.using(db_name).filter(
                domain=self.domain, closed=True, type=type_).annotate(
                    lifespan=F('closed_on') - F('opened_on')).aggregate(
                        avg_lifespan=Avg('lifespan')))
            self._print_value('Average lifespan for "%s" cases' % type_,
                              result['avg_lifespan'])

            self._cases_created_per_user_per_month(type_)

        self._print_value('Average ledgers per case', avg_ledgers_per_case)

        stats = defaultdict(list)
        for db_name, case_ids_p in split_list_by_db_partition(case_ids):
            transactions_per_case_per_month = (
                LedgerTransaction.objects.using(db_name).filter(
                    case_id__in=case_ids).annotate(
                        m=Month('server_date'), y=Year('server_date')).values(
                            'case_id', 'y', 'm').annotate(count=Count('id')))
            for row in transactions_per_case_per_month:
                month = date(row['y'], row['m'], 1)
                stats[month].append(row['count'])

        final_stats = []
        for month, transaction_count_list in sorted(list(stats.items()),
                                                    key=lambda r: r[0]):
            final_stats.append(
                (month.isoformat(),
                 sum(transaction_count_list) // len(transaction_count_list)))

        self.stdout.write('Ledger updates per case')
        self._print_table(['Month', 'Ledgers updated per case'], final_stats)
Code example #17
 def update_pos(self):
     aggregation = self.battlecontuberniuminturn_set.all().aggregate(
         avg_x=Avg('x_pos'), avg_z=Avg('z_pos'))
     self.x_pos = math.floor(aggregation['avg_x'])
     self.z_pos = math.floor(aggregation['avg_z'])
     self.save()
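Avg() yields None on an empty queryset, so math.floor() would raise a TypeError if no rows exist. A defensive variant (a sketch, not the project's code):

 def update_pos_guarded(self):
     aggregation = self.battlecontuberniuminturn_set.all().aggregate(
         avg_x=Avg('x_pos'), avg_z=Avg('z_pos'))
     if aggregation['avg_x'] is not None:  # no rows -> both averages are None
         self.x_pos = math.floor(aggregation['avg_x'])
         self.z_pos = math.floor(aggregation['avg_z'])
         self.save()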
Code example #18
File: models.py Project: marteoma/Hoop
 def score(self):
     return LinkedPlayer.objects.filter(player=self).aggregate(
         Avg('score'))['score__avg']
Code example #19
File: serializers.py Project: deniskolosov/shop-api
 def get_avg_rating(self, obj):
     return obj.reviews.filter(approved=True).aggregate(
         Avg('rating')).get('rating__avg')
Code example #20
    def _refresh_title_norms(self, full=False):
        """
        Refreshes the value of the title_norm field.

        This needs to be set to 'lavg/ld' where:
         - lavg is the average length of titles in all documents (also in terms)
         - ld is the length of the title field in this document (in terms)
        """

        lavg = self.entries.annotate(
            title_length=Length('title')
        ).filter(title_length__gt=0).aggregate(
            Avg('title_length')
        )['title_length__avg']

        if full:
            # Update the whole table
            # This is the most accurate option but requires a full table rewrite
            # so we can't do it too often as it could lead to locking issues.
            entries = self.entries

        else:
            # Only update entries where title_norm is 1.0
            # This is the default value set on new entries.
            # It's possible that other entries could have this exact value but there shouldn't be too many of those
            entries = self.entries.filter(title_norm=1.0)

        entries.annotate(title_length=Length('title')).filter(
            title_length__gt=0
        ).update(title_norm=lavg / F('title_length'))
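As a worked example of the lavg/ld formula from the docstring: if the average title length across indexed entries is 6 terms and a given document's title is 3 terms long, its title_norm becomes 6 / 3 = 2.0, weighting shorter-than-average titles up.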
Code example #21
 def resolve_score(instance, args):
     return Rating.objects.filter(
         beer=instance.id).aggregate(Avg('score'))['score__avg']
Code example #22
def add_movieuser_avg_score():
    for movieUser in MovieUser.objects.filter(score_avg__isnull=True):
        movieUser.score_avg = movieUser.comments.aggregate(
            avg=Avg('score'))['avg']
        movieUser.save()
Code example #23
def process_non_facility_warehouse_data(location, start_date, end_date, runner=None, strict=True):
    facs = get_non_archived_facilities_below(location, end_date)

    start_date = datetime(start_date.year, start_date.month, 1)
    end_date = datetime(end_date.year, end_date.month, 1)

    if runner:
        runner.location = location
        runner.save()
    fac_ids = [f.location_id for f in facs]
    logging.info("processing non-facility %s (%s), %s children"
                 % (location.name, str(location.location_id), len(facs)))
    prods = SQLProduct.objects.filter(domain=location.domain, is_archived=False)

    sub_summaries = OrganizationSummary.objects.filter(
        location_id__in=fac_ids, date__range=(start_date, end_date), average_lead_time_in_days__gt=0
    ).values('date').annotate(average_time=Avg('average_lead_time_in_days'))

    sub_summaries = {
        (subsummary['date'].year, subsummary['date'].month): subsummary
        for subsummary in sub_summaries
    }

    sub_prods = ProductAvailabilityData.objects.filter(
        location_id__in=fac_ids, date__range=(start_date, end_date)
    ).values('product', 'date').annotate(
        total_sum=Sum('total'),
        with_stock_sum=Sum('with_stock'),
        without_stock_sum=Sum('without_stock'),
    )

    sub_prods = {
        ((sub_prod['date'].year, sub_prod['date'].month), sub_prod['product']): sub_prod for sub_prod in sub_prods
    }

    sub_group_summaries = GroupSummary.objects.filter(
        org_summary__location_id__in=fac_ids,
        org_summary__date__range=(start_date, end_date)
    ).values('title', 'org_summary__date').annotate(
        total_sum=Sum('total'),
        responded_sum=Sum('responded'),
        on_time_sum=Sum('on_time'),
        complete_sum=Sum('complete')
    )

    sub_group_summaries = {
        ((sub_group_summary['org_summary__date'].year, sub_group_summary['org_summary__date'].month), sub_group_summary['title']): sub_group_summary
        for sub_group_summary in sub_group_summaries
    }

    total_orgs = len(facs)
    for year, month in months_between(start_date, end_date):
        window_date = datetime(year, month, 1)
        org_summary = OrganizationSummary.objects.get_or_create(
            location_id=location.location_id, date=window_date
        )[0]

        org_summary.total_orgs = total_orgs

        # lead times
        if (year, month) in sub_summaries:
            sub_summary = sub_summaries[year, month]
            org_summary.average_lead_time_in_days = sub_summary['average_time']
        else:
            org_summary.average_lead_time_in_days = 0

        org_summary.save()
        # product availability
        for p in prods:
            product_data = ProductAvailabilityData.objects.get_or_create(product=p.product_id,
                                                                         location_id=location.location_id,
                                                                         date=window_date)[0]

            sub_prod = sub_prods.get(((year, month), p.product_id), {})

            product_data.total = sub_prod.get('total_sum', 0)
            if strict:
                assert product_data.total == total_orgs, \
                    "total should match number of sub facilities %s-%s" % (product_data.total, total_orgs)
            product_data.with_stock = sub_prod.get('with_stock_sum', 0)
            product_data.without_stock = sub_prod.get('without_stock_sum', 0)
            product_data.without_data = product_data.total - product_data.with_stock - product_data.without_stock
            product_data.save()

        dg = DeliveryGroups(month=month, facs=facs)
        for status_type in const.NEEDED_STATUS_TYPES:
            gsum = GroupSummary.objects.get_or_create(org_summary=org_summary, title=status_type)[0]

            sub_sum = sub_group_summaries.get(((year, month), status_type), {})

            gsum.total = sub_sum.get('total_sum', 0)
            gsum.responded = sub_sum.get('responded_sum', 0)
            gsum.on_time = sub_sum.get('on_time_sum', 0)
            gsum.complete = sub_sum.get('complete_sum', 0)
            gsum.save()

            if status_type == SupplyPointStatusTypes.DELIVERY_FACILITY:
                expected = len(dg.delivering())
            elif status_type == SupplyPointStatusTypes.R_AND_R_FACILITY:
                expected = len(dg.submitting())
            elif status_type == SupplyPointStatusTypes.SOH_FACILITY \
                    or status_type == SupplyPointStatusTypes.SUPERVISION_FACILITY:
                expected = len(facs)
            if gsum.total != expected:
                logging.info("expected %s but was %s for %s" % (expected, gsum.total, gsum))

        for alert_type in [const.RR_NOT_SUBMITTED, const.DELIVERY_NOT_RECEIVED,
                           const.SOH_NOT_RESPONDING, const.RR_NOT_RESPONDED, const.DELIVERY_NOT_RESPONDING]:
            sub_alerts = Alert.objects.filter(location_id__in=fac_ids, date=window_date, type=alert_type)
            aggregate_response_alerts(location.location_id, window_date, sub_alerts, alert_type)

    update_historical_data_for_location(location)
Code example #24
def pm25_city_hour(request):
    localtime = time.localtime(time.time())
    t1 = time.strftime("%Y-%m-%d %H:00:00", localtime)
    year = localtime.tm_year
    month = localtime.tm_mon
    day = localtime.tm_mday
    hour = int(localtime.tm_hour) - 1
    if hour == -1:
        day -= 1
        hour = 23

    city = request.GET.get("city", None)
    data = {}
    if city:
        data_hour = [[], [], [], [], [], [], []]
        hours = []
        for i in range(11):
            if (hour - 1) < -1:
                hour = 23
                day -= 1

            # Zero-padding via %02d keeps the timestamp in the stored format.
            pm_data = PlacePm.objects.filter(
                area=city,
                time_point='%s-%02d-%02d %02d:00:00' % (year, month, day, hour))
            data_hour[0].append(pm_data.aggregate(Avg("aqi"))['aqi__avg'])
            data_hour[1].append(pm_data.aggregate(Avg("co"))['co__avg'])
            data_hour[2].append(pm_data.aggregate(Avg("no2"))['no2__avg'])
            data_hour[3].append(pm_data.aggregate(Avg("o3"))['o3__avg'])
            data_hour[4].append(pm_data.aggregate(Avg("pm10"))['pm10__avg'])
            data_hour[5].append(pm_data.aggregate(Avg("pm2_5"))['pm2_5__avg'])
            data_hour[6].append(pm_data.aggregate(Avg("so2"))['so2__avg'])
            hours.append('%s日%02d时' % (day, hour))

            hour -= 1

        # data_month = [[],[],[],[],[],[],[]]
        # months = []
        # for i in range(12):
        # 	if (month - 1) < 0:
        # 		month = 12
        # 		year -= 1
        # 	pm_data = PlacePm.objects.filter(area = city, time_point__year=year, time_point__month = month)
        # 	data_month[0].append(pm_data.aggregate(Avg("aqi"))['aqi__avg'])
        # 	data_month[1].append(pm_data.aggregate(Avg("co"))['co__avg'])
        # 	data_month[2].append(pm_data.aggregate(Avg("no2"))['no2__avg'])
        # 	data_month[3].append(pm_data.aggregate(Avg("o3"))['o3__avg'])
        # 	data_month[4].append(pm_data.aggregate(Avg("pm10"))['pm10__avg'])
        # 	data_month[5].append(pm_data.aggregate(Avg("pm2_5"))['pm2_5__avg'])
        # 	data_month[6].append(pm_data.aggregate(Avg("so2"))['so2__avg'])
        # 	months.append(str(year) + '年' + str(month) + '月')

        # 	month -= 1

        data['data_hour'] = data_hour
        data['hours'] = hours
        # data['data_month'] = data_month
        # data['months'] = months

    return HttpResponse(json.dumps(data))
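The seven aggregate() calls per hour above each issue a separate query; Django can compute all seven averages in one pass. A minimal sketch against the same pm_data queryset:

# One query instead of seven; results use the default '<field>__avg' keys,
# e.g. averages['pm2_5__avg'].
averages = pm_data.aggregate(
    Avg("aqi"), Avg("co"), Avg("no2"), Avg("o3"),
    Avg("pm10"), Avg("pm2_5"), Avg("so2"))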
Code example #25
    def _ledgers_per_case(self):
        results = (
            LedgerES(es_instance_alias=ES_EXPORT_INSTANCE)
            .domain(self.domain).aggregation(
                TermsAggregation('by_case', 'case_id', size=100)
            ).size(0).run()
        )

        ledgers_per_case = results.aggregations.by_case
        case_ids = set()
        ledger_counts = []
        for case_id, ledger_count in ledgers_per_case.counts_by_bucket().items():
            case_ids.add(case_id)
            ledger_counts.append(ledger_count)

        if not case_ids:
            self.stdout.write("Domain has no ledgers")
            return

        avg_ledgers_per_case = sum(ledger_counts) // len(case_ids)
        case_types_result = CaseES(es_instance_alias=ES_EXPORT_INSTANCE)\
            .domain(self.domain).case_ids(case_ids)\
            .aggregation(TermsAggregation('types', 'type'))\
            .size(0).run()

        case_types = case_types_result.aggregations.types.keys

        self.stdout.write('\nCase Types with Ledgers')
        for type_ in case_types:
            self._print_value('case_type', type_, CaseES().domain(self.domain).case_type(type_).count())
            if should_use_sql_backend(self.domain):
                db_name = get_db_aliases_for_partitioned_query()[0]  # just query one shard DB
                # aggregate() collapses the annotated lifespans into one value;
                # values() does not accept flat=True (that is a values_list option).
                result = (
                    CommCareCaseSQL.objects.using(db_name).filter(domain=self.domain, closed=True, type=type_)
                    .annotate(lifespan=F('closed_on') - F('opened_on'))
                    .aggregate(avg_lifespan=Avg('lifespan'))
                )
                self._print_value('Average lifespan for "%s" cases' % type_, result['avg_lifespan'])

            self._cases_created_per_user_per_month(type_)

        self._print_value('Average ledgers per case', avg_ledgers_per_case)

        if should_use_sql_backend(self.domain):
            stats = defaultdict(list)
            for db_name, case_ids_p in split_list_by_db_partition(case_ids):
                transactions_per_case_per_month = (
                    LedgerTransaction.objects.using(db_name).filter(case_id__in=case_ids)
                    .annotate(m=Month('server_date'), y=Year('server_date')).values('case_id', 'y', 'm')
                    .annotate(count=Count('id'))
                )
                for row in transactions_per_case_per_month:
                    month = date(row['y'], row['m'], 1)
                    stats[month].append(row['count'])
        else:
            transactions_per_case_per_month = (
                StockTransaction.objects.filter(case_id__in=case_ids)
                .annotate(m=Month('report__date'), y=Year('report__date')).values('case_id', 'y', 'm')
                .annotate(count=Count('id'))
            )

            stats = defaultdict(list)
            for row in transactions_per_case_per_month:
                month = date(row['y'], row['m'], 1)
                stats[month].append(row['count'])

        final_stats = []
        for month, transaction_count_list in sorted(list(stats.items()), key=lambda r: r[0]):
            final_stats.append((month.isoformat(), sum(transaction_count_list) // len(transaction_count_list)))

        self._print_table(['Month', 'Ledgers updated per case'], final_stats)
Code example #26
def index(request):
    localtime = time.localtime(time.time())
    t1 = time.strftime("%Y-%m-%d %H:00:00", localtime)
    year = localtime.tm_year
    month = localtime.tm_mon
    day = localtime.tm_mday
    hour = int(localtime.tm_hour) - 1
    if hour == -1:
        day -= 1
        hour = 23

    # Zero-padding via %02d keeps the timestamp in the stored format.
    time_point = '%s-%02d-%02d %02d:00:00' % (year, month, day, hour)

    data = {}
    # data['map'] = {}
    for city in citys:
        pm_data = PlacePm.objects.filter(area=city, time_point=time_point)
        data[city] = pm_data.aggregate(Avg("aqi"))['aqi__avg']

    # data_hour = [[],[],[],[],[],[],[]]
    # hours = []
    # for i in range(11):
    # 	if(hour - 1) < -1:
    # 		hour = 23
    # 		day -= 1
    # 	if month < 10:
    # 		_month = '0' + str(month)
    # 	else:
    # 		_month = month
    # 	if day < 10:
    # 		_day = '0' + str(day)
    # 	else:
    # 		_day = day
    # 	if hour < 10:
    # 		_hour = '0' + str(hour)
    # 	else:
    # 		_hour = hour

    # 	pm_data = PlacePm.objects.filter(area = '长沙', time_point = '%s-%s-%s %s:00:00' %(year, _month, _day, _hour))
    # 	data_hour[0].append(pm_data.aggregate(Avg("aqi"))['aqi__avg'])
    # 	data_hour[1].append(pm_data.aggregate(Avg("co"))['co__avg'])
    # 	data_hour[2].append(pm_data.aggregate(Avg("no2"))['no2__avg'])
    # 	data_hour[3].append(pm_data.aggregate(Avg("o3"))['o3__avg'])
    # 	data_hour[4].append(pm_data.aggregate(Avg("pm10"))['pm10__avg'])
    # 	data_hour[5].append(pm_data.aggregate(Avg("pm2_5"))['pm2_5__avg'])
    # 	data_hour[6].append(pm_data.aggregate(Avg("so2"))['so2__avg'])
    # 	hours.append([day, hour])

    # 	hour -= 1

    # data_month = [[],[],[],[],[],[],[]]
    # months = []
    # for i in range(12):
    # 	if (month - 1) < 0:
    # 		month = 12
    # 		year -= 1
    # 	pm_data = PlacePm.objects.filter(area = '长沙', time_point__year=year, time_point__month = month)
    # 	data_month[0].append(pm_data.aggregate(Avg("aqi"))['aqi__avg'])
    # 	data_month[1].append(pm_data.aggregate(Avg("co"))['co__avg'])
    # 	data_month[2].append(pm_data.aggregate(Avg("no2"))['no2__avg'])
    # 	data_month[3].append(pm_data.aggregate(Avg("o3"))['o3__avg'])
    # 	data_month[4].append(pm_data.aggregate(Avg("pm10"))['pm10__avg'])
    # 	data_month[5].append(pm_data.aggregate(Avg("pm2_5"))['pm2_5__avg'])
    # 	data_month[6].append(pm_data.aggregate(Avg("so2"))['so2__avg'])
    # 	months.append([year, month])

    # 	month -= 1

    # data['data_hour'] = data_hour
    # data['hours'] = hours
    # data['data_month'] = data_month
    # data['months'] = months

    return HttpResponse(json.dumps(data))
Code example #27
def get_awc_reports_system_usage(domain,
                                 config,
                                 month,
                                 prev_month,
                                 two_before,
                                 loc_level,
                                 show_test=False):
    def get_data_for(filters, date):
        queryset = AggAwcMonthly.objects.filter(
            month=datetime(*date), **filters).values(loc_level).annotate(
                awc_open=Sum('awc_days_open'),
                weighed=Sum('wer_weighed'),
                all=Sum('wer_eligible'),
            )
        if not show_test:
            queryset = apply_exclude(domain, queryset)
        return queryset

    chart_data = DailyAttendanceView.objects.filter(
        pse_date__range=(datetime(*two_before), datetime(*month)),
        **config).values('pse_date', 'aggregation_level').annotate(
            awc_count=Sum('awc_open_count'),
            attended_children=Avg('attended_children_percent')).order_by(
                'pse_date')

    if not show_test:
        chart_data = apply_exclude(domain, chart_data)

    awc_count_chart = []
    attended_children_chart = []
    for row in chart_data:
        date = row['pse_date']
        date_in_milliseconds = int(date.strftime("%s")) * 1000
        awc_count_chart.append([date_in_milliseconds, row['awc_count']])
        attended_children_chart.append(
            [date_in_milliseconds, row['attended_children'] or 0])

    this_month_data = get_data_for(config, month)
    prev_month_data = get_data_for(config, prev_month)

    return {
        'kpi': [[{
            'label': _('AWC Days Open'),
            'help_text': _(
                "The total number of days the AWC is open in the given month. "
                "The AWC is expected to be open 6 days a week "
                "(Not on Sundays and public holidays)"
            ),
            'percent': percent_increase('awc_open', this_month_data,
                                        prev_month_data),
            'value': get_value(this_month_data, 'awc_open'),
            'all': '',
            'format': 'number',
            'frequency': 'month'
        }, {
            'label': _(
                "Percentage of eligible children (ICDS beneficiaries between "
                "0-6 years) who have been weighed in the current month"
            ),
            'help_text': _('Percentage of AWCs with a functional toilet'),
            'percent': percent_diff('weighed', this_month_data,
                                    prev_month_data, 'all'),
            'value': get_value(this_month_data, 'weighed'),
            'all': get_value(this_month_data, 'all'),
            'format': 'percent_and_div',
            'frequency': 'month'
        }]],
        'charts': [
            [{
                'key': 'AWC Days Open Per Week',
                'values': awc_count_chart,
                "classed": "dashed",
            }],
            [{
                'key': 'PSE- Average Weekly Attendance',
                'values': attended_children_chart,
                "classed": "dashed",
            }],
        ],
    }
Code example #28
def execute(backend, field, **kwargs):
    return backend.query(**kwargs).aggregate(x=Avg(field))['x']
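A hypothetical call, assuming backend.query() returns a Django queryset with a numeric 'price' field (both names are illustrative):

# Returns the bare number thanks to the x= alias, or None if nothing matched.
avg_price = execute(backend, 'price', category='books')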
Code example #29
def average_playtime(start_date, end_date):
    return _get_sessions(start_date,
                         end_date).aggregate(len=Avg('session_seconds'))
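Note that this returns the whole aggregate dict rather than a bare number, so a caller has to index it by the 'len' alias:

# None when no sessions fall inside the range.
seconds = average_playtime(start_date, end_date)['len']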
Code example #30
class AgencyViewSet(viewsets.ModelViewSet):
    """API views for Agency"""

    # pylint: disable=too-many-public-methods
    queryset = (Agency.objects.order_by("id").select_related(
        "jurisdiction", "parent", "appeal_agency").prefetch_related(
            Prefetch(
                "emails",
                queryset=EmailAddress.objects.filter(
                    status="good",
                    agencyemail__request_type="primary",
                    agencyemail__email_type="to",
                ),
                to_attr="primary_emails",
            ),
            Prefetch(
                "phones",
                queryset=PhoneNumber.objects.filter(
                    type="fax",
                    status="good",
                    agencyphone__request_type="primary"),
                to_attr="primary_faxes",
            ),
            Prefetch(
                "addresses",
                queryset=Address.objects.filter(
                    agencyaddress__request_type="primary"),
                to_attr="primary_addresses",
            ),
            "types",
        ).annotate(
            average_response_time_=Coalesce(
                ExtractDay(
                    Avg(
                        F("foiarequest__datetime_done") -
                        F("foiarequest__composer__datetime_submitted"))),
                Value(0),
            ),
            fee_rate_=Coalesce(
                100 * CountWhen(foiarequest__price__gt=0,
                                output_field=FloatField()) /
                NullIf(
                    Count("foiarequest"), Value(0), output_field=FloatField()),
                Value(0),
            ),
            success_rate_=Coalesce(
                100 * CountWhen(
                    foiarequest__status__in=["done", "partial"],
                    output_field=FloatField(),
                ) / NullIf(
                    Count("foiarequest"), Value(0), output_field=FloatField()),
                Value(0),
            ),
            number_requests=Count("foiarequest"),
            number_requests_completed=CountWhen(foiarequest__status="done"),
            number_requests_rejected=CountWhen(foiarequest__status="rejected"),
            number_requests_no_docs=CountWhen(foiarequest__status="no_docs"),
            number_requests_ack=CountWhen(foiarequest__status="ack"),
            number_requests_resp=CountWhen(foiarequest__status="processed"),
            number_requests_fix=CountWhen(foiarequest__status="fix"),
            number_requests_appeal=CountWhen(foiarequest__status="appealing"),
            number_requests_pay=CountWhen(foiarequest__status="payment"),
            number_requests_partial=CountWhen(foiarequest__status="partial"),
            number_requests_lawsuit=CountWhen(foiarequest__status="lawsuit"),
            number_requests_withdrawn=CountWhen(
                foiarequest__status="abandoned"),
        ))
    serializer_class = AgencySerializer
    # don't allow ordering by computed fields
    ordering_fields = [
        f for f in AgencySerializer.Meta.fields
        if f not in ("absolute_url", "average_response_time", "fee_rate",
                     "success_rate") and not f.startswith(("has_", "number_"))
    ]

    def get_queryset(self):
        """Filter out non-approved agencies for non-staff"""
        if self.request.user.is_staff:
            return self.queryset
        else:
            return self.queryset.filter(status="approved")

    class Filter(django_filters.FilterSet):
        """API Filter for Agencies"""

        jurisdiction = django_filters.NumberFilter(
            field_name="jurisdiction__id")
        types = django_filters.CharFilter(field_name="types__name",
                                          lookup_expr="iexact")

        class Meta:
            model = Agency
            fields = ("name", "status", "jurisdiction", "types",
                      "requires_proxy")

    filterset_class = Filter