Example #1
def CallDictFunc(request):
    profile_list = Profile.objects.all()
    #print(profile_list)
    for row in profile_list.values_list():
        print(row)

    print(Profile.objects.aggregate(Avg('age')))
    print(Profile.objects.aggregate(Max('age')))
    print(Profile.objects.aggregate(Sum('age')))
    print(Profile.objects.aggregate(Count('age')))
    print(Profile.objects.filter(name='홍길동').aggregate(
        Count('age')))  # filter() expresses the WHERE condition.
    # Print the number of rows whose name is '홍길동'.

    print(len(profile_list))
    # values() + annotate(): what is the average age per group?
    qs = Profile.objects.values('name').annotate(
        Avg('age'))  # group by name and compute the average age per name
    for r in qs:
        print(r)

    # Wrap the results in a list of dicts and pass them to the client (template).
    pro_list = []

    for pro in profile_list:
        pro_dict = {}
        pro_dict['name'] = pro.name
        pro_dict['age'] = pro.age
        pro_list.append(pro_dict)
        print(pro_list)

    context = {'pro_dicts': pro_list}

    return render(request, 'abc.html', context)
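A quick sketch of the shapes the aggregate() and annotate() calls above return, assuming the example's Profile model with its name and age fields (the numeric values are illustrative, not real output):

from django.db.models import Avg, Count

Profile.objects.aggregate(Avg('age'))
# -> {'age__avg': 31.4}   (default key is <field>__<aggregate function>)
Profile.objects.aggregate(average_age=Avg('age'))
# -> {'average_age': 31.4}   (a keyword argument sets a custom key)
Profile.objects.values('name').annotate(Avg('age'))
# -> <QuerySet [{'name': '홍길동', 'age__avg': 29.0}, ...]>   (one dict per name group)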
Example #2
    def test_aggregations(self):
        for age, birthday in (
            [4, (2007, 12, 25)],
            [4, (2006, 1, 1)],
            [1, (2008, 12, 1)],
            [4, (2006, 6, 1)],
            [12, (1998, 9, 1)],
        ):
            Person.objects.create(age=age, birthday=datetime(*birthday))

        aggregates = Person.objects.aggregate(Min('age'),
                                              Max('age'),
                                              avgage=Avg('age'))
        self.assertEqual(aggregates, {
            'age__min': 1,
            'age__max': 12,
            'avgage': 5.0
        })

        # With filters and testing the sqlaggregates->mongoaggregate
        # conversion.
        aggregates = Person.objects.filter(age__gte=4).aggregate(
            Min('birthday'), Max('birthday'), Avg('age'), Count('id'))
        self.assertEqual(
            aggregates, {
                'birthday__max': datetime(2007, 12, 25, 0, 0),
                'birthday__min': datetime(1998, 9, 1, 0, 0),
                'age__avg': 6.0,
                'id__count': 4,
            })
Example #3
    def __get_summary(self):
        """
        Returns a dictionary which has summarized statistics of this exercise. The dictionary is
        generated only once and saved into a private field to improve performance with subsequent
        calls.
        
        @return: a dictionary with the keys submission_count, average_grade,
        average_submissions and submitter_count
        """
        if not hasattr(self, "temp_summary"):
            submission_count = self.submissions.count()
            submitter_count = UserProfile.objects.distinct().filter(
                submissions__exercise=self).count()

            average_grade = UserProfile.objects.distinct().filter(
                submissions__exercise=self).annotate(
                    best_grade=Max('submissions__grade')).aggregate(
                        average_grade=Avg('best_grade'))["average_grade"]
            average_submissions = UserProfile.objects.distinct(
            ).filter(submissions__exercise=self).annotate(
                submission_count=Count('submissions')).aggregate(
                    avg_submissions=Avg('submission_count'))["avg_submissions"]

            if average_grade is None:
                average_grade = 0
            if average_submissions is None:
                average_submissions = 0

            self.temp_summary = {
                "submission_count": submission_count,
                "average_grade": average_grade,
                "submitter_count": submitter_count,
                "average_submissions": average_submissions,
            }
        return self.temp_summary
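The average grade above is an "average of per-user maxima": each UserProfile is first annotated with a Max() over its related submissions, and Avg() is then aggregated over that annotation. The same pattern in isolation, reusing the example's UserProfile model and submissions relation:

# Average of per-user best grades: annotate first, then aggregate over the annotation.
from django.db.models import Avg, Max

best_per_user = UserProfile.objects.annotate(best_grade=Max('submissions__grade'))
avg_of_best = best_per_user.aggregate(average_grade=Avg('best_grade'))['average_grade'] or 0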
Example #4
def run():
    """ Runs the script """
    # First do it for the cantons which have fetched cities
    countries = Country.objects.all()
    country_count = countries.count()
    for idx1, country in enumerate(countries):
        nr_of_country_cantons = Canton.objects.filter(country=country).count()
        cantons = Canton.objects.filter(Q(name='Vaud') | Q(name='Genève'))
        canton_count = cantons.count()
        for idx2, canton in enumerate(cantons):
            metrics = City.objects.filter(canton=canton).aggregate(
                Avg('tree_density'), Avg('tree_sparsity'))
            if metrics['tree_density__avg'] is not None:
                tree_density = metrics['tree_density__avg']
                tree_sparsity = metrics['tree_sparsity__avg']
                cars_pp = canton.cars / canton.population
                rel_population = canton.population / country.population * nr_of_country_cantons
                rel_area = canton.area / country.area * nr_of_country_cantons
                canton.tree_density = tree_density
                canton.tree_sparsity = tree_sparsity
                canton.score = np.dot(weights, [
                    tree_density,
                    functions.sparsity_to_representable(tree_sparsity),
                    (1 - cars_pp), rel_population, rel_area
                ])
                canton.has_mapping = True
                canton.save()
            print('Fetched canton {}/{} ({})'.format(idx2 + 1, canton_count,
                                                     canton.name))
        print('Fetched country {}/{} ({})'.format(idx1 + 1, country_count,
                                                  country.name))
Example #5
    def rows(self):
        soh_data = []

        if 'data_config' in self.config:
            data_config = self.config['data_config']
            late = data_config.soh_data_total.get('late', 0)
            on_time = data_config.soh_data_total.get('on_time', 0)
            soh_data.append(
                GroupSummary(title=SupplyPointStatusTypes.SOH_FACILITY,
                             responded=late + on_time,
                             on_time=on_time,
                             complete=late + on_time,
                             total=len(data_config.descendants)))
            return soh_data

        if self.config['org_summary']:
            try:
                sohs = GroupSummary.objects.filter(
                    title=SupplyPointStatusTypes.SOH_FACILITY,
                    org_summary__in=self.config['org_summary']).aggregate(
                        Avg('responded'), Avg('on_time'), Avg('complete'),
                        Max('total'))

                soh_data.append(
                    GroupSummary(title=SupplyPointStatusTypes.SOH_FACILITY,
                                 responded=sohs['responded__avg'],
                                 on_time=sohs['on_time__avg'],
                                 complete=sohs['complete__avg'],
                                 total=sohs['total__max']))
            except GroupSummary.DoesNotExist:
                return soh_data
        return soh_data
Example #6
    def rows(self):
        del_data = []
        if 'data_config' in self.config:
            data_config = self.config['data_config']
            delivered = data_config.delivery_data_total.get('received', 0)
            not_delivered = data_config.delivery_data_total.get(
                'not_received', 0)
            del_data.append(
                GroupSummary(title=SupplyPointStatusTypes.DELIVERY_FACILITY,
                             responded=delivered + not_delivered,
                             on_time=delivered + not_delivered,
                             complete=delivered,
                             total=data_config.delivery_data_total.get(
                                 'total', 0)))
            return del_data

        if self.config['org_summary']:
            try:
                data = GroupSummary.objects.filter(
                    title=SupplyPointStatusTypes.DELIVERY_FACILITY,
                    org_summary__in=self.config['org_summary']).aggregate(
                        Avg('responded'), Avg('on_time'), Avg('complete'),
                        Max('total'))

                del_data.append(
                    GroupSummary(
                        title=SupplyPointStatusTypes.DELIVERY_FACILITY,
                        responded=data['responded__avg'],
                        on_time=data['on_time__avg'],
                        complete=data['complete__avg'],
                        total=data['total__max']))
            except GroupSummary.DoesNotExist:
                return del_data
        return del_data
Example #7
def get_averages(request):

    matches = Match.objects.values('team_number').annotate(
        total_matches=Count('team_number'),
        auto_yellow_stacked_totes=Avg('auto_yellow_stacked_totes'),
        auto_yellow_moved_totes=Avg('auto_yellow_moved_totes'),
        auto_moved_to_auto_zone=Avg('auto_moved_to_auto_zone'),
        auto_moved_containers=Avg('auto_moved_containers'),
        tele_pushed_litter=Avg('tele_pushed_litter'),
        tele_placed_in_container_litter=Avg('tele_placed_in_container_litter'),
        totestack_start_height=Avg('totestack__start_height'),
        totestack_totes_added=Avg('totestack__totes_added'),
        number_of_totestacks=Count('totestack'),
        containerstack_height=Avg('containerstack__height'),
        number_of_containerstacks=Count('containerstack'),
    )

    teams = []
    auto_scores = []
    tele_scores = []

    unprocessed_matches = {}

    for match in matches:
        auto_score = 0

        # multiply by point value, divide by robots per alliance

        if match['auto_moved_to_auto_zone'] is not None:
            auto_score += (match['auto_moved_to_auto_zone'] / 3 * 4)

        if match['auto_yellow_moved_totes'] is not None:
            auto_score += (match['auto_yellow_moved_totes'] / 3 * 6)

        if match['auto_yellow_stacked_totes'] is not None:
            auto_score += (match['auto_yellow_stacked_totes'] / 3 * 20)

        tele_score = 0

        # worth 2 per tote stacked (average)
        if match['totestack_totes_added'] is not None:
            tele_score += match['totestack_totes_added'] * (
                match['number_of_totestacks'] / match['total_matches']) * 2

        if match['containerstack_height'] is not None:
            tele_score += match['containerstack_height'] * (
                match['number_of_containerstacks'] /
                match['total_matches']) * 4

        teams.append(match['team_number'])
        auto_scores.append(auto_score)
        tele_scores.append(tele_score)

    processed_matches = {
        'teams': teams,
        'auto_scores': auto_scores,
        'tele_scores': tele_scores
    }

    return HttpResponse(json.dumps(processed_matches))
Example #8
def run():
    """ Runs the script """
    # First do it for the cities which have tiles
    countries = Country.objects.all()
    country_count = countries.count()
    for idx1, country in enumerate(countries):
        nr_of_country_cities = City.objects.filter(country=country).count()
        # TODO: REMOVE canton__name='Vaud'
        cities = City.objects.filter(country=country).filter(Q(canton__name='Vaud') | Q(canton__name='Genève'))
        city_count = cities.count()
        for idx2, city in enumerate(cities):
            metrics = Tile.objects.annotate(geom=Cast('mpoly', GeometryField())) \
                .filter(geom__within=city.mpoly) \
                .annotate(count=Count('*')) \
                .filter(count__gt=0) \
                .aggregate(Avg('tree_density'), Avg('tree_sparsity'))
            if metrics['tree_density__avg'] is not None: # Check if result returned, a.k.a has tiles
                tree_density = metrics['tree_density__avg']
                tree_sparsity = metrics['tree_sparsity__avg']
                cars_pp = city.cars / city.population
                rel_population = city.population / country.population * nr_of_country_cities
                rel_area = city.area / country.area * nr_of_country_cities
                city.tree_density = tree_density
                city.tree_sparsity = tree_sparsity
                city.score = np.dot(weights, [
                    tree_density,
                    functions.sparsity_to_representable(tree_sparsity),
                    (1 - cars_pp),
                    rel_population,
                    rel_area
                ])
                city.has_mapping = True
                city.save()
            print('Fetched city {}/{} ({})'.format(idx2 + 1, city_count, city.name))
        print('Fetched country {}/{} ({})'.format(idx1 + 1, country_count, country.name))
Example #9
 def define_prices(self):
     return self.aggregate(
         ads_count=Count('id'),
         max_price_uzs=Max('price_uzs'),
         min_price_uzs=Min('price_uzs'),
         avg_price_uzs=Avg('price_uzs'),
         max_price_usd=Max('price_usd'),
         min_price_usd=Min('price_usd'),
         avg_price_usd=Avg('price_usd'),
     )
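define_prices calls self.aggregate(...), which suggests it lives on a custom QuerySet exposed through a manager. A minimal sketch of how such a method might be wired up; the Ad model name and field types are assumptions for illustration, only the field names come from the example:

from django.db import models
from django.db.models import Avg, Count, Max, Min

class AdQuerySet(models.QuerySet):
    def define_prices(self):
        # Returns a dict with one entry per aggregate alias below.
        return self.aggregate(
            ads_count=Count('id'),
            max_price_uzs=Max('price_uzs'),
            min_price_uzs=Min('price_uzs'),
            avg_price_uzs=Avg('price_uzs'),
            max_price_usd=Max('price_usd'),
            min_price_usd=Min('price_usd'),
            avg_price_usd=Avg('price_usd'),
        )

class Ad(models.Model):  # hypothetical model for illustration
    price_uzs = models.DecimalField(max_digits=14, decimal_places=2)
    price_usd = models.DecimalField(max_digits=12, decimal_places=2)

    objects = AdQuerySet.as_manager()

# Usage: Ad.objects.filter(...).define_prices() returns the dict of aggregates.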
Example #10
 def rows(self):
     product_availability = []
     if self.config['org_summary']:
         product_availability = ProductAvailabilityData.objects.filter(
             date__range=(self.config['startdate'], self.config['enddate']),
             location_id=self.config['org_summary'][0].location_id,
             product__in=self.config['products']).values(
                 'product').annotate(with_stock=Avg('with_stock'),
                                     without_data=Avg('without_data'),
                                     without_stock=Avg('without_stock'),
                                     total=Max('total'))
     return product_availability
Example #11
    def get_media(self, initial_time, final_time):
        if self.is_time:
            return \
                self.points.filter(
                    y_accumulative__gte=initial_time,
                    y_accumulative__lte=final_time)\
                .order_by('x').aggregate(avg=Avg('y'))['avg']

        return \
            self.points.filter(
                x__gte=initial_time,
                x__lte=final_time).order_by('x').aggregate(avg=Avg('y'))['avg']
Example #12
    def stats(cls, events=None):
        '''
        Returns stats on the events queryset provided.

        :param events: A queryset of events, that have the fields sessions, games, players
        '''
        if events is None:
            events = cls.implicit()

        if events:
            # Tailwind's Median aggregator does not work on Durations (PostgreSQL intervals),
            # so we have to convert them to epoch time. Extract is a Django database function
            # that can extract 'epoch', which is the documented way of casting a PostgreSQL
            # interval to epoch time:
            #    https://www.postgresql.org/message-id/19495.1059687790%40sss.pgh.pa.us
            # Django does not document 'epoch' alas, but it works:
            #    https://docs.djangoproject.com/en/4.0/ref/models/database-functions/#extract
            # We need a Django ExpressionWrapper to cast the duration field to DurationField:
            # even though it is a PostgreSQL interval, Django still treats it as a DateTimeField
            # (presumably because it comes from the difference of two DateTimeFields, and a
            # bug/feature fails to recast that difference as a DurationField).
            epoch_duration = Extract(ExpressionWrapper(F('duration'), output_field=DurationField()), lookup_name='epoch')
            epoch_gap = Extract(ExpressionWrapper(F('gap_time'), output_field=DurationField()), lookup_name='epoch')

            result = events.aggregate(Min('sessions'),
                                      Avg('sessions'),
                                      Median('sessions'),
                                      Max('sessions'),
                                      Min('games'),
                                      Avg('games'),
                                      Median('games'),
                                      Max('games'),
                                      Min('players'),
                                      Avg('players'),
                                      Median('players'),
                                      Max('players'),
                                      duration__min=Min('duration'),
                                      duration__avg=Avg('duration'),
                                      duration__median=Median(epoch_duration),
                                      duration__max=Max('duration'),
                                      gap__min=Min('gap_time'),
                                      gap__avg=Avg('gap_time'),
                                      gap__median=Median(epoch_gap),
                                      gap__max=Max('gap_time'))

            # aggregate() is a QuerySet endpoint (i.e. it evaluates the query and returns a
            # standard dict), so we can cast the epoch times back to Durations for the consumer.
            result['duration__median'] = timedelta(seconds=result['duration__median'])
            result['gap__median'] = timedelta(seconds=result['gap__median'])
        else:
            result = None

        return result
Example #13
    def get(self, request: WSGIRequest):
        context = self.get_context(request=request)
        if not context["is_staff"]:
            messages.add_message(request, messages.INFO,
                                 "Only staff can access the judging panel.")
            return redirect("timathon:Home")

        try:
            challenge = Challenge.objects.get(ended=False,
                                              posted=True,
                                              type="MO")
        except Challenge.DoesNotExist:
            messages.add_message(request, messages.INFO,
                                 "There is no ongoing challenge right now.")
            return redirect("/")

        submissions = Submission.objects.filter(challenge=challenge)
        submissions = list(submissions)

        for submission in submissions:
            members = submission.team.members.all()
            discord_members = []
            for member in members:
                new_member = {}
                try:
                    user = SocialAccount.objects.get(user_id=member.id)
                except SocialAccount.DoesNotExist:
                    pass
                else:
                    new_member["user_id"] = user.uid
                    avatar_url = user.get_avatar_url()
                    if avatar_url.endswith("None.png"):
                        random = randint(0, 4)
                        avatar_url = (
                            f"https://cdn.discordapp.com/embed/avatars/{random}.png"
                        )
                    new_member["avatar_url"] = avatar_url
                    new_member["username"] = user.extra_data["username"]
                    new_member["discriminator"] = user.extra_data[
                        "discriminator"]
                    discord_members.append(new_member)
            submission.team.discord_members = discord_members

            submission.judges = [vote.user for vote in submission.votes.all()]
            submission.points = submission.votes.aggregate(
                average=Avg("c1") + Avg("c2") + Avg("c3") +
                Avg("c4"))["average"]

        context["submissions"] = submissions
        context["challenge"] = challenge
        return render(request, "timathon/judge.html", context)
Example #14
 def get_ratings(self, obj):
     score = None
     if obj.type == USER_TYPE_DEVELOPER:
         query = Rating.objects.filter(
             tasks__closed=True, tasks__participants=obj, tasks__participation__accepted=True
         ).order_by('criteria')
         details = query.values('criteria').annotate(avg=Avg('score'))
         criteria_choices = dict(Rating._meta.get_field('criteria').flatchoices)
         for rating in details:
             rating['display_criteria'] = criteria_choices[rating['criteria']]
             rating['display_avg'] = rating['avg'] and '{:0,.0f}%'.format(rating['avg']*10)
         avg = query.aggregate(avg=Avg('score'))['avg']
         score = {'avg': avg, 'display_avg': avg and '{:0,.0f}%'.format(avg*10) or None, 'details': details}
     return score
Example #15
def water_consumption_report(args):
    """
    Отчет по воде
    
    """
    data = {}
    items = []
    report_date = parse(args['date']).date()
    for branch in Branch.objects.all():
        items.append({'name': branch.name, 'is_branch': True})

        for thermal in ThermalArea.objects.filter(branch__id=branch.id):

            outdoor_temp_avg = Environment.objects.filter(
                date=report_date,
                boiler__thermalArea__id=thermal.id,
                outdoor_temp_actual__gt=-273.0).aggregate(
                    Avg('outdoor_temp_actual'))

            temp_avg = WaterConsumption.objects.filter(date = report_date,
                                                       category__name = u"Общий расход",
                                                       category__active = True,
                                                       category__boiler__thermalArea__id = thermal.id,
                                                       backward_temperature_actual__gt = -273.0,
                                                       farward_temperature_actual__gt = -273.0,
                                                       backward_temperature_estimated__gt = -273.0,
                                                       farward_temperature_estimated__gt = -273.0) \
                                        .aggregate(Avg('backward_temperature_actual'),
                                                   Avg('farward_temperature_actual'),
                                                   Avg('backward_temperature_estimated'),
                                                   Avg('farward_temperature_estimated'))

            items.append({
                'name': thermal.name,
                'outdoor_temp_actual': outdoor_temp_avg['outdoor_temp_actual__avg'],
                'backward_temperature_actual': temp_avg['backward_temperature_actual__avg'],
                'farward_temperature_actual': temp_avg['farward_temperature_actual__avg'],
                'backward_temperature_estimated': temp_avg['backward_temperature_estimated__avg'],
                'farward_temperature_estimated': temp_avg['farward_temperature_estimated__avg'],
            })

    data['items'] = items
    data['date'] = report_date
    return (data, 'water/temperature_report.html')
Example #16
def _get_efficiency_statistics(model, owner, field="efficiency", count=False, worth=False, worth_field='value'):
    eff_values = {}
    eff_map = {
        "efficiency__avg": "Efficiency (Average)",
        "efficiency__stddev": "Efficiency (Standard Deviation)",
        "efficiency__min": "Efficiency (Minimum)",
        "efficiency__perc25": "Efficiency (25th Percentile)",
        "efficiency__median": "Efficiency (Median)",
        "efficiency__perc75": "Efficiency (75th Percentile)",
        "efficiency__max": "Efficiency (Maximum)",
        "efficiency__count": "Count",
        "value__sum": "Worth",
    }
    aggregations = [
        Avg(field),
        StdDev(field),
        Min(field),
        Perc25(field),
        Median(field),
        Perc75(field),
        Max(field)
    ]
    if worth:
        aggregations.insert(0, Sum(worth_field))
    if count:
        aggregations.insert(0, Count(field))

    efficiencies = model.objects.filter(owner=owner).aggregate(*aggregations)
    for eff_key, eff_val in efficiencies.items():
        eff_values[eff_map[eff_key]] = round(eff_val or 0, 2)

    return eff_values
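Perc25, Median and Perc75 are not built-in Django aggregates, so this snippet presumably relies on custom Aggregate subclasses (or a third-party package). A minimal sketch of how such percentile aggregates can be defined on PostgreSQL; this is an assumption about what the project uses, not its actual implementation:

from django.db.models import Aggregate, FloatField

class Median(Aggregate):
    # Renders as PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY <expression>) on PostgreSQL.
    function = 'PERCENTILE_CONT'
    name = 'median'
    output_field = FloatField()
    template = '%(function)s(0.5) WITHIN GROUP (ORDER BY %(expressions)s)'

class Perc25(Median):
    name = 'perc25'
    template = '%(function)s(0.25) WITHIN GROUP (ORDER BY %(expressions)s)'

class Perc75(Median):
    name = 'perc75'
    template = '%(function)s(0.75) WITHIN GROUP (ORDER BY %(expressions)s)'

# With these names, the default aggregate keys become 'efficiency__median',
# 'efficiency__perc25' and 'efficiency__perc75', matching the eff_map above.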
Example #17
    def _forms_per_user_per_month(self):
        performance_threshold = get_performance_threshold(self.domain)
        base_queryset = MALTRow.objects.filter(
            domain_name=self.domain,
            month__gte=self.date_start,
        )

        if self.date_end:
            # filter() returns a new queryset, so the result has to be reassigned
            base_queryset = base_queryset.filter(month__lt=self.date_end)

        user_stat_from_malt = (base_queryset.values('month').annotate(
            num_users=Count('user_id'),
            avg_forms=Avg('num_of_forms'),
            std_dev=StdDev('num_of_forms')))

        def _format_rows(query_):
            return [(row['month'].isoformat(), row['num_users'],
                     row['avg_forms'], row['std_dev']) for row in query_]

        headers = [
            'Month', 'Active Users', 'Average forms per user', 'Std Dev'
        ]
        self.stdout.write('All user stats')
        self._print_table(
            headers,
            _format_rows(
                user_stat_from_malt.filter(
                    user_type__in=['CommCareUser']).filter(
                        user_id__in=self.active_not_deleted_users).filter(
                            num_of_forms__gte=performance_threshold)))

        self.stdout.write('System user stats')
        self._print_table(
            headers,
            _format_rows(user_stat_from_malt.filter(username='******')))
Example #18
 def get_rating_average(self, axis=None):
     """ Renvoyer la note moyenne sur un axe """
     ratings = self.ratings.all()
     if axis is not None:
         ratings = ratings.filter(axis=axis)
     result = ratings.aggregate(Avg('rating'))
     return result['rating__avg']
Example #19
def chart(request):
    #sql='select title,avg(point) points from board_movie group by title'
    #data=Movie.objects.raw(sql)
    data=Movie.objects.values('title').annotate(point_avg=Avg('point'))[0:10]
    df=pd.DataFrame(data)
    bigdataPro.make_graph(df.title, df.point_avg)
    return render(request,"chart.html",{"data":data})
Example #20
def range_statistics(start, end):
    """ Returns the statistics (totals) and the number of data points for a target range. """
    queryset = DayStatistics.objects.filter(day__gte=start, day__lt=end)
    number_of_days = queryset.count()
    aggregate = queryset.aggregate(
        total_cost=Sum('total_cost'),
        fixed_cost=Sum('fixed_cost'),
        electricity1=Sum('electricity1'),
        electricity1_cost=Sum('electricity1_cost'),
        electricity1_returned=Sum('electricity1_returned'),
        electricity2=Sum('electricity2'),
        electricity2_cost=Sum('electricity2_cost'),
        electricity2_returned=Sum('electricity2_returned'),
        electricity_merged=Sum(
            models.F('electricity1') + models.F('electricity2')),
        electricity_cost_merged=Sum(
            models.F('electricity1_cost') + models.F('electricity2_cost')),
        electricity_returned_merged=Sum(
            models.F('electricity1_returned') +
            models.F('electricity2_returned')),
        gas=Sum('gas'),
        gas_cost=Sum('gas_cost'),
        temperature_min=Min('lowest_temperature'),
        temperature_max=Max('highest_temperature'),
        temperature_avg=Avg('average_temperature'),
    )
    aggregate.update(dict(number_of_days=number_of_days))

    return aggregate, number_of_days
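The merged figures are computed by summing combined F() expressions, so electricity1 + electricity2 is added per row inside the database before SUM() is applied. A short usage sketch (the dates are illustrative):

from datetime import date

# Totals for January 2021; the function returns the aggregate dict plus the day count.
totals, number_of_days = range_statistics(date(2021, 1, 1), date(2021, 2, 1))
totals['electricity_merged']   # SUM(electricity1 + electricity2) over the range
totals['temperature_avg']      # AVG(average_temperature)
totals['number_of_days']       # also merged into the dict via aggregate.update(...)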
Example #21
    def add_custom_form_data(self, indicator_name, range_name, xmlns, indicator_type, lower, upper):
        """
        For specific forms add the number of forms completed during the time period (lower to upper)
        In some cases also add the average duration of the forms.
        """
        aggregation = Avg('duration') if indicator_type == TYPE_DURATION else Count('instance_id')

        def millis_to_secs(x):
            return round(x / 1000)

        transformer = millis_to_secs if indicator_type == TYPE_DURATION else None

        results = FormData.objects \
            .values('user_id') \
            .filter(
                xmlns=xmlns,
                domain=self.domain,
                user_id__in=self.users_needing_data) \
            .filter(**self._date_filters('time_end', lower, upper)) \
            .annotate(count=aggregation)

        self._add_data(
            results,
            '{}{}'.format(indicator_name, range_name.title()),
            transformer=transformer)
Example #22
 def test_subquery_row_range_rank(self):
     qs = Employee.objects.annotate(
         highest_avg_salary_date=Subquery(
             Employee.objects.filter(
                 department=OuterRef('department'),
             ).annotate(
                 avg_salary=Window(
                     expression=Avg('salary'),
                     order_by=[F('hire_date').asc()],
                     frame=RowRange(start=-1, end=1),
                 ),
             ).order_by('-avg_salary', 'hire_date').values('hire_date')[:1],
         ),
     ).order_by('department', 'name')
     self.assertQuerysetEqual(qs, [
         ('Adams', 'Accounting', datetime.date(2005, 11, 1)),
         ('Jenson', 'Accounting', datetime.date(2005, 11, 1)),
         ('Jones', 'Accounting', datetime.date(2005, 11, 1)),
         ('Williams', 'Accounting', datetime.date(2005, 11, 1)),
         ('Moore', 'IT', datetime.date(2011, 3, 1)),
         ('Wilkinson', 'IT', datetime.date(2011, 3, 1)),
         ('Johnson', 'Management', datetime.date(2005, 6, 1)),
         ('Miller', 'Management', datetime.date(2005, 6, 1)),
         ('Johnson', 'Marketing', datetime.date(2009, 10, 1)),
         ('Smith', 'Marketing', datetime.date(2009, 10, 1)),
         ('Brown', 'Sales', datetime.date(2007, 6, 1)),
         ('Smith', 'Sales', datetime.date(2007, 6, 1)),
     ], transform=lambda row: (row.name, row.department, row.highest_avg_salary_date))
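The inner annotation computes a moving average of salary over a three-row window (the previous, current and next row by hire_date), and the subquery picks the hire_date where that moving average is highest per department. The window part in isolation, reusing the example's Employee model and adding a partition_by because it runs outside the department-filtered subquery:

from django.db.models import Avg, F, RowRange, Window

moving_avg = Window(
    expression=Avg('salary'),
    partition_by=[F('department')],   # one window per department
    order_by=F('hire_date').asc(),
    frame=RowRange(start=-1, end=1),  # 1 row preceding through 1 row following
)
Employee.objects.annotate(avg_salary=moving_avg).values('name', 'avg_salary')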
Example #23
def management_adviser_timing_ajax(request, context=None):
    if context is None:
        context = {}
    start_date, end_date, referrer = filter_data(request)

    timing_data = []

    adviser_queue_objects = AdviserQueue.objects.filter(
        interaction_ended__gte=start_date,
        interaction_ended__lte=end_date).exclude(_claim_time=None)

    if referrer is not None:
        user_referrals = UserReferral.objects.filter(
            referrer=referrer).values('user__email')
        adviser_queue_objects = adviser_queue_objects.filter(
            email__in=user_referrals)

    advisers = User.objects.filter(
        pk__in=adviser_queue_objects.values_list('adviser', flat=True))

    for adviser in advisers:
        timing_data.append({
            'adviser': adviser.email,
            'claim_time': timedelta(seconds=adviser_queue_objects.filter(
                adviser=adviser).aggregate(
                    claim_time=Avg('_claim_time'))['claim_time']),
        })

    context['timing_data'] = timing_data

    return render(request, 'concierge/management/adviser_timing.html', context)
Example #24
 def avaregereview(self):
     reviews = Comment.objects.filter(
         product=self, status='True').aggregate(avarage=Avg('rate'))
     avg = 0
     if reviews["avarage"] is not None:
         avg = float(reviews["avarage"])
     return avg
Example #25
def vendas_por_mesa(dias):
    def split_and_strip(string, sep=","):
        """Splits a string using `sep` as a separator and strips whitespace from the split results."""
        # Return a list (not a map object) so the result can be indexed on Python 3.
        return [s.strip() for s in string.split(sep)]

    def primeira_mesa(mesas):
        try:
            return int(split_and_strip(mesas)[0])
        except ValueError:
            return split_and_strip(mesas)[0]

    vendas = Venda.objects.filter(dia__in=dias)
    agrupado = vendas.values("mesa").order_by("mesa")
    dados = agrupado.annotate(vendas=Count("id"),
                              entrada=Sum("conta"),
                              pessoas=Sum("num_pessoas"),
                              permanencia_media=Avg("permanencia"))
    headers = ("mesa", "vendas", "entrada", "pessoas", "permanencia_media")

    for row in dados:
        row["permanencia_media"] = secs_to_time(row["permanencia_media"])
        row["entrada"] = format_currency(row["entrada"])
        row["mesa"] = row["mesa"].replace("/", ",")

    body = [[row[col] for col in headers] for row in dados]
    body.sort(key=lambda row: primeira_mesa(row[0]))

    return {"title": "Vendas por mesa", "headers": headers, "body": body}
Example #26
    def rows(self):
        locations = SQLLocation.objects.filter(parent__location_id=self.config['location_id'])
        rows = []
        for loc in locations:
            try:
                org_summary = OrganizationSummary.objects.filter(
                    location_id=loc.location_id,
                    date__range=(self.config['startdate'], self.config['enddate'])
                ).aggregate(average_lead_time_in_days=Avg('average_lead_time_in_days'))
            except OrganizationSummary.DoesNotExist:
                continue

            avg_lead_time = org_summary['average_lead_time_in_days']

            if avg_lead_time:
                avg_lead_time = "%.1f" % avg_lead_time
            else:
                avg_lead_time = "None"

            url = make_url(DeliveryReport, self.config['domain'],
                           '?location_id=%s&filter_by_program=%s&'
                           'datespan_type=%s&datespan_first=%s&datespan_second=%s',
                           (loc.location_id,
                            self.config['program'], self.config['datespan_type'],
                            self.config['datespan_first'], self.config['datespan_second']))

            rows.append([link_format(loc.name, url), avg_lead_time])
        return rows
Example #27
class CompanyChartsViewSet(GenericViewSet, ListModelMixin):
    queryset = Company.objects.annotate(
        __session_average=Subquery(
            Session.objects.filter(company=OuterRef("id")).annotate(
                value=ExpressionWrapper(
                    Avg("answered_questions__value") * F("set__weight"),
                    output_field=DecimalField(max_digits=3, decimal_places=2),
                )
            ).values("value")
        )
    ).annotate(
        date=Case(
            When(sessions__until__lte=Now(), then=F("sessions__until")),
            default=Now(),
        ),
        data=ExpressionWrapper(
            Sum("__session_average") * F("sessions__theme__weight"),
            output_field=DecimalField(decimal_places=2, max_digits=3),
        ),
    ).values("data", "date")

    serializer_class = CompanyChartSerializer

    def filter_queryset(self, queryset):
        return queryset.filter(id=self.request.user.member.company_id)
Example #28
    def _refresh_title_norms(self, full=False):
        """
        Refreshes the value of the title_norm field.

        This needs to be set to 'lavg/ld' where:
         - lavg is the average length of titles in all documents (also in terms)
         - ld is the length of the title field in this document (in terms)
        """

        lavg = (self.entries.annotate(
            title_length=Length("title")).filter(title_length__gt=0).aggregate(
                Avg("title_length"))["title_length__avg"])

        if full:
            # Update the whole table
            # This is the most accurate option but requires a full table rewrite
            # so we can't do it too often as it could lead to locking issues.
            entries = self.entries

        else:
            # Only update entries where title_norm is 1.0
            # This is the default value set on new entries.
            # It's possible that other entries could have this exact value but there shouldn't be too many of those
            entries = self.entries.filter(title_norm=1.0)

        entries.annotate(title_length=Length("title")).filter(
            title_length__gt=0).update(title_norm=lavg / F("title_length"))
Example #29
def add_unit_to_battle_in_progress(battle_organization: BattleOrganization,
                                   world_unit: WorldUnit):
    battle = battle_organization.side.battle
    if world_unit.owner_character:
        battle_character = BattleCharacter.objects.get_or_create(
            battle_organization=battle_organization,
            character=world_unit.owner_character,
            present_in_battle=(world_unit.owner_character.location.tile ==
                               world_unit.location.tile))[0]
        battle_character_in_turn = BattleCharacterInTurn.objects.get_or_create(
            battle_character=battle_character,
            battle_turn=battle.get_latest_turn())
    else:
        battle_character = None
    battle_unit = BattleUnit.objects.create(
        battle_organization=battle_organization,
        owner=battle_character,
        world_unit=world_unit,
        starting_manpower=world_unit.get_fighting_soldiers().count(),
        battle_side=battle_organization.side,
        name=world_unit.name,
        type=world_unit.type)
    create_contubernia(battle_unit)

    position_generator = joining_contubernium_position_generator()

    for contub in battle_unit.battlecontubernium_set.all():
        coords = next(position_generator)
        while battle.get_latest_turn().get_contubernium_in_position(
                coords) is not None:
            coords = next(position_generator)

        contub.x_offset_to_formation = coords.x
        contub.z_offset_to_formation = coords.z
        contub.starting_x_pos = coords.x if battle_organization.side.z else -coords.x
        contub.starting_z_pos = coords.z + 10 if battle_organization.side.z else -coords.z - 10
        contub.save()

    battle_unit.starting_x_pos = math.floor(
        battle_unit.battlecontubernium_set.all().aggregate(
            Avg('starting_x_pos'))['starting_x_pos__avg'])
    battle_unit.starting_z_pos = math.floor(
        battle_unit.battlecontubernium_set.all().aggregate(
            Avg('starting_z_pos'))['starting_z_pos__avg'])
    battle_unit.save()

    set_contubernia_starting_pos(battle_unit)
Example #30
    def rows(self):
        rows = []
        products_ids = self.get_products_ids()

        if not self.config['location_id']:
            return rows

        location = SQLLocation.objects.get(
            location_id=self.config['location_id'])
        sql_locations = SQLLocation.objects.filter(
            parent__location_id=self.config['location_id'])
        is_mohsw = False
        stockouts_map = {}
        product_availabilities = {
            (pa['location_id'], pa['product']):
            (pa['without_stock'], pa['total'])
            for pa in ProductAvailabilityData.objects.filter(
                location_id__in=list(
                    sql_locations.values_list('location_id', flat=True)),
                date__range=(
                    self.config['startdate'],
                    self.config['enddate'])).values('location_id', 'product').
            annotate(without_stock=Avg('without_stock'), total=Max('total'))
        }
        if location.location_type.name == 'MOHSW':
            is_mohsw = True
            stockouts_map = self.get_stockouts_map(self.config['enddate'],
                                                   location)

        for sql_location in sql_locations.exclude(is_archived=True):
            facilities = get_facilities(sql_location, self.config['domain'])
            facilities_count = facilities.count()

            soh_late, soh_not_responding, soh_on_time = self.get_soh_data(
                sql_location, facilities_count)
            if not is_mohsw:
                percent_stockouts = self.get_stockouts(facilities)
            else:
                if facilities_count > 0:
                    stockouts = stockouts_map.get(sql_location.location_id, 0)
                    percent_stockouts = stockouts * 100 / float(
                        facilities_count)
                else:
                    percent_stockouts = 0

            row_data = self._format_row(percent_stockouts, soh_late,
                                        soh_not_responding, soh_on_time,
                                        sql_location)
            for product_id in products_ids:
                product_availability = product_availabilities.get(
                    (sql_location.location_id, product_id))
                if product_availability and product_availability[1] != 0:
                    row_data.append(
                        format_percent(product_availability[0] * 100 /
                                       float(product_availability[1])))
                else:
                    row_data.append("<span class='no_data'>No Data</span>")
            rows.append(row_data)
        return rows