Example #1
    def test_aggregations(self):
        for age, birthday in (
            [4, (2007, 12, 25)],
            [4, (2006, 1, 1)],
            [1, (2008, 12, 1)],
            [4, (2006, 6, 1)],
            [12, (1998, 9, 1)],
        ):
            Person.objects.create(age=age, birthday=datetime(*birthday))

        aggregates = Person.objects.aggregate(Min('age'),
                                              Max('age'),
                                              avgage=Avg('age'))
        self.assertEqual(aggregates, {
            'age__min': 1,
            'age__max': 12,
            'avgage': 5.0
        })

        # With filters and testing the sqlaggregates->mongoaggregate
        # conversion.
        aggregates = Person.objects.filter(age__gte=4).aggregate(
            Min('birthday'), Max('birthday'), Avg('age'), Count('id'))
        self.assertEqual(
            aggregates, {
                'birthday__max': datetime(2007, 12, 25, 0, 0),
                'birthday__min': datetime(1998, 9, 1, 0, 0),
                'age__avg': 6.0,
                'id__count': 4,
            })
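
A note on the assertion keys above: unkeyed aggregates get a default alias of the form <field>__<aggregate>, while keyword arguments name the key explicitly. A minimal sketch of that naming rule, reusing the Person model from the example (the 'youngest' key is hypothetical):

from django.db.models import Min

# Positional aggregates default to '<field>__<lowercased aggregate name>'.
Person.objects.aggregate(Min('age'))           # {'age__min': 1}
# Keyword arguments set the dict key explicitly.
Person.objects.aggregate(youngest=Min('age'))  # {'youngest': 1}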
Example #2
 def define_prices(self):
     return self.aggregate(
         ads_count=Count('id'),
         max_price_uzs=Max('price_uzs'),
         min_price_uzs=Min('price_uzs'),
         avg_price_uzs=Avg('price_uzs'),
         max_price_usd=Max('price_usd'),
         min_price_usd=Min('price_usd'),
         avg_price_usd=Avg('price_usd'),
     )
Example #3
    def stats(cls, events=None):
        '''
        Returns stats on the events queryset provided.

        :param events: A queryset of events that have the fields sessions, games, and players
        '''
        if events is None:
            events = cls.implicit()

        if events:
            # Tailwind's Median aggregator does not work on Durations (PostgreSQL Intervals)
            # So we have to convert it to Epoch time. Extract is a Django method that can extract
            # 'epoch' which is the documented method of casting a PostgreSQL interval to epoch time.
            #    https://www.postgresql.org/message-id/19495.1059687790%40sss.pgh.pa.us
            # Django does not document 'epoch' alas but it works:
            #    https://docs.djangoproject.com/en/4.0/ref/models/database-functions/#extract
            # We need a Django ExpressionWrapper to cast the duration field to DurationField
            # because, even though it's a PostgreSQL interval, Django still thinks of it as a
            # DateTimeField (it comes from the difference of two DateTimeFields, I guess, and a
            # bug/feature fails to recast a difference of DateTimeFields as a DurationField).
            epoch_duration = Extract(ExpressionWrapper(F('duration'), output_field=DurationField()), lookup_name='epoch')
            epoch_gap = Extract(ExpressionWrapper(F('gap_time'), output_field=DurationField()), lookup_name='epoch')

            result = events.aggregate(Min('sessions'),
                                      Avg('sessions'),
                                      Median('sessions'),
                                      Max('sessions'),
                                      Min('games'),
                                      Avg('games'),
                                      Median('games'),
                                      Max('games'),
                                      Min('players'),
                                      Avg('players'),
                                      Median('players'),
                                      Max('players'),
                                      duration__min=Min('duration'),
                                      duration__avg=Avg('duration'),
                                      duration__median=Median(epoch_duration),
                                      duration__max=Max('duration'),
                                      gap__min=Min('gap_time'),
                                      gap__avg=Avg('gap_time'),
                                      gap__median=Median(epoch_gap),
                                      gap__max=Max('gap_time'))

            # aggregate() is a QuerySet endpoint (i.e. it evaluates the query and returns a
            # standard dict), so we can cast the epoch times back to Durations for the consumer.
            result['duration__median'] = timedelta(seconds=result['duration__median'])
            result['gap__median'] = timedelta(seconds=result['gap__median'])
        else:
            result = None

        return result
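
The workaround described in the comments above is worth seeing in isolation: wrap the interval-valued expression in an ExpressionWrapper with output_field=DurationField() so that Extract(..., lookup_name='epoch') can return total seconds, then convert back with timedelta. A minimal sketch of that round trip, assuming a hypothetical Event model with start and end DateTimeFields and a PostgreSQL backend:

from datetime import timedelta

from django.db.models import Avg, DurationField, ExpressionWrapper, F
from django.db.models.functions import Extract

# F('end') - F('start') is a PostgreSQL interval; the wrapper forces
# DurationField so Extract can pull 'epoch' (total seconds) out of it.
elapsed = ExpressionWrapper(F('end') - F('start'), output_field=DurationField())
stats = Event.objects.aggregate(avg_secs=Avg(Extract(elapsed, lookup_name='epoch')))
avg_elapsed = timedelta(seconds=stats['avg_secs'])  # back to a duration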
Example #4
 def times_classes(self, prog):
     ssi_times_dict = dict(
         StudentSubjectInterest.objects.filter(subject__parent_program=prog)
         # GROUP BY user, SELECT user and min start date.
         # Don't you love django ORM syntax?
         .values_list('user').annotate(Min('start_date')))
     sr_times = StudentRegistration.objects.filter(
         section__parent_class__parent_program=prog).values_list(
             'user').annotate(Min('start_date'))
     for id, sr_time in sr_times:
         if id not in ssi_times_dict or sr_time < ssi_times_dict[id]:
             ssi_times_dict[id] = sr_time
     return sorted(ssi_times_dict.values())  # itervalues() is Python 2 only
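
The GROUP BY trick the comment points at generalizes: calling values_list() (or values()) before annotate() groups by the listed fields, so each result row is a (user_id, Min(start_date)) pair that feeds straight into dict(). A minimal sketch, assuming a hypothetical Visit model with user and start_date fields:

from django.db.models import Min

# values_list() before annotate() turns the annotation into a grouped
# aggregate: one (user_id, earliest_start) tuple per user.
earliest_by_user = dict(
    Visit.objects.values_list('user').annotate(Min('start_date'))
)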
Example #5
def total_contrib_data(request):
    """
    View function URL : '/accounting/contributions/total/data/'
    """
    try:
        REQ = request.GET if request.method == 'GET' else request.POST
        from_date = timezone.make_aware(
            datetime.strptime(REQ.get('from_date', None), DATE_PATTERN),
            timezone.get_current_timezone())
        to_date = timezone.make_aware(
            datetime.strptime(REQ.get('to_date', None), DATE_PATTERN),
            timezone.get_current_timezone())
    except (TypeError, KeyError, ValueError):
        from_date = JournalEntry.objects.all().aggregate(
            date=Min("date"))["date"]
        if from_date is None: from_date = datetime.utcfromtimestamp(0)
        to_date = JournalEntry.objects.all().aggregate(
            date=Max("date"))["date"]
        if to_date is None: to_date = timezone.now()

    query = JournalEntry.objects.filter(type__in=OPERATION_TYPES,
                                        date__gte=from_date,
                                        date__lte=to_date)
    total_contribs = query.aggregate(sum=Sum('amount'))['sum']

    return HttpResponse(print_float(total_contribs))
Example #6
def range_statistics(start, end):
    """ Returns the statistics (totals) and the number of data points for a target range. """
    queryset = DayStatistics.objects.filter(day__gte=start, day__lt=end)
    number_of_days = queryset.count()
    aggregate = queryset.aggregate(
        total_cost=Sum('total_cost'),
        fixed_cost=Sum('fixed_cost'),
        electricity1=Sum('electricity1'),
        electricity1_cost=Sum('electricity1_cost'),
        electricity1_returned=Sum('electricity1_returned'),
        electricity2=Sum('electricity2'),
        electricity2_cost=Sum('electricity2_cost'),
        electricity2_returned=Sum('electricity2_returned'),
        electricity_merged=Sum(
            models.F('electricity1') + models.F('electricity2')),
        electricity_cost_merged=Sum(
            models.F('electricity1_cost') + models.F('electricity2_cost')),
        electricity_returned_merged=Sum(
            models.F('electricity1_returned') +
            models.F('electricity2_returned')),
        gas=Sum('gas'),
        gas_cost=Sum('gas_cost'),
        temperature_min=Min('lowest_temperature'),
        temperature_max=Max('highest_temperature'),
        temperature_avg=Avg('average_temperature'),
    )
    aggregate.update(dict(number_of_days=number_of_days))

    return aggregate, number_of_days
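
The merged sums above work because Sum() accepts any expression, not just a field name, so Sum(F('electricity1') + F('electricity2')) totals the per-row sum of both columns in a single query. A minimal sketch of the pattern, assuming a hypothetical Reading model with two decimal fields; an explicit output_field is only needed when the operand types differ:

from django.db.models import DecimalField, F, Sum

totals = Reading.objects.aggregate(
    merged=Sum(
        F('electricity1') + F('electricity2'),
        # Optional here, but required if the combined fields differ in type.
        output_field=DecimalField(),
    ),
)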
Example #7
    def GetDataValue(self, key, index):
        unit = self.input[7]
        now = datetime.datetime.now()
        nearest_time = timezone.get_current_timezone().localize(
            datetime.datetime(year=now.year, month=now.month, day=now.day,
                     hour=now.hour, minute=now.minute - (now.minute % unit),
                     second=0)) - datetime.timedelta(minutes=unit * (self.input[4] - index - 1))

        print(now, self.input[4], index, nearest_time)
        prices = Price.objects.filter(created__gte=nearest_time - datetime.timedelta(minutes=unit),
                                      created__lt=nearest_time).order_by('created')

        if key == 0:
            return now.strftime('%Y%m%d')
        elif key == 1:
            return nearest_time.hour * 100 + nearest_time.minute
        elif key == 2:
            return prices[0].value
        elif key == 3:
            return prices.aggregate(Max('value'))['value__max']
        elif key == 4:
            return prices.aggregate(Min('value'))['value__min']
        elif key == 5:
            return prices[prices.count() - 1].value
        else:
            return self.value
Example #8
    def test_outerref_in_cte_query(self):
        # This query is meant to return the difference between min and max
        # order of each region, through a subquery
        min_and_max = With(
            Order.objects.filter(region=OuterRef("pk")).values(
                'region')  # This is to force group by region_id
            .annotate(
                amount_min=Min("amount"),
                amount_max=Max("amount"),
            ).values('amount_min', 'amount_max'))
        regions = (
            Region.objects.annotate(
                difference=Subquery(
                    min_and_max.queryset()
                    .with_cte(min_and_max)
                    .annotate(difference=ExpressionWrapper(
                        F('amount_max') - F('amount_min'),
                        output_field=int_field,
                    ))
                    .values('difference')[:1],
                    output_field=IntegerField(),
                )
            ).order_by("name")
        )
        print(regions.query)

        data = [(r.name, r.difference) for r in regions]
        self.assertEqual(data, [("bernard's star", None), ('deimos', None),
                                ('earth', 3), ('mars', 2), ('mercury', 2),
                                ('moon', 2), ('phobos', None),
                                ('proxima centauri', 0),
                                ('proxima centauri b', 2), ('sun', 0),
                                ('venus', 3)])
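
This example leans on the third-party django-cte package: With(...) wraps a queryset as a common table expression, cte.queryset() starts a new query against it, and .with_cte(cte) attaches the WITH clause to the final SQL. A stripped-down sketch of that flow, assuming the same Order model:

from django.db.models import Max, Min
from django_cte import With

# Build the CTE once; with_cte() is what actually emits WITH ... AS (...).
min_and_max = With(
    Order.objects.values('region').annotate(
        amount_min=Min('amount'), amount_max=Max('amount'))
)
spans = (min_and_max.queryset()
         .with_cte(min_and_max)
         .values('region', 'amount_min', 'amount_max'))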
Example #9
    def get_fetal_movement_chart(cls, user: User, from_date: date,
                                 to_date: date):
        qs = (cls.objects.filter(
            owned_by=user,
            kick_time__date__gte=from_date,
            kick_time__date__lte=to_date,
        ).datetimes("kick_time", "day").annotate(
            count=Count("pk"),
            start=Min("kick_time__hour"),
            stop=Max("kick_time__hour"),
        ).values("count", "start", "stop", "kick_time__date"))

        data_per_day = []
        for item in qs:
            data_per_day.append({
                "date": item["kick_time__date"].strftime("%d %b"),
                "start": item["start"],
                "stop": item["stop"],
                "count": item["count"],
            })
        return data_per_day
Example #10
    def get_ordered_items(cls, order):
        if len(order) == 0:
            order = '-id'

        if order not in cls.ORDERING and \
                not (order[0] == '-' and order[1:] in cls.ORDERING):
            order = '-id'

        asc = '-' if order[0] == '-' else ''

        queryset = cls.objects.all()
        if order in ('name', '-name'):
            # Use a single order_by() call; chaining order_by() replaces the
            # previous ordering instead of adding to it.
            queryset = queryset.order_by(asc + 'last_name', asc + 'first_name')
        elif order in ('age', '-age'):
            queryset = queryset.order_by(('' if asc == '-' else '-') +
                                         'birth_date')
        elif order in ('creation', '-creation'):
            queryset = queryset.annotate(
                creation=Min('history__id')).order_by(asc + 'creation')
        elif order in ('modification', '-modification'):
            queryset = queryset.annotate(
                modification=Max('history__id')).order_by(asc + 'modification')
        elif order in ('id_card', '-id_card'):
            queryset = queryset.order_by(asc + 'id_card_prefix',
                                         asc + 'id_card_number')
        else:
            queryset = queryset.order_by(order)

        return queryset
Example #11
 def month_stats(self):
     today = date.today()
     objs = self.user.record_set.aggregate(Min("date"))
     first_day = objs["date__min"] if objs else date.today()
     if not first_day:
         return []
     month = first_day.month
     year = first_day.year
     data = []
     # Walk month by month from the first record up to the current month.
     while (year, month) <= (today.year, today.month):
         res = self.user.record_set.filter(date__year=year,
                                           date__month=month).aggregate(
                                               Sum("cost"))
         data.append({
             "label": "%d/%d" % (month, year),
             # aggregate() always returns a dict; the sum itself is None
             # for months without records.
             "cost": res["cost__sum"] or 0.0,
         })
         if month == 12:
             year += 1
             month = 1
         else:
             month += 1
     return data
Example #12
def migrate_conference_from_to_dates(apps, schema_editor):
    """ One-Time sets the from_date and to_date for all existing conferences
        if not already set, to the dates of their min/max conference event times """

    group_app_name, group_model_name = settings.COSINNUS_GROUP_OBJECT_MODEL.split(
        '.')
    CosinnusGroup = apps.get_model(group_app_name, group_model_name)
    try:
        ConferenceEvent = apps.get_model('cosinnus_event', 'ConferenceEvent')
    except:
        # we can ignore migration for environments without cosinnus_event installed
        ConferenceEvent = None

    if ConferenceEvent:
        for group in CosinnusGroup.objects.all():
            if group.type == 2:  # is conference
                needs_save = False
                if not group.from_date:
                    queryset = ConferenceEvent.objects.filter(
                        room__group=group)
                    if queryset.count() > 0:
                        group.from_date = queryset.aggregate(
                            Min('from_date'))['from_date__min']
                        needs_save = True
                if not group.to_date:
                    queryset = ConferenceEvent.objects.filter(
                        room__group=group)
                    if queryset.count() > 0:
                        group.to_date = queryset.aggregate(
                            Max('to_date'))['to_date__max']
                        needs_save = True

                if needs_save:
                    group.save()
Example #13
    def claim(self):
        """Put people into this division.
        
        If all members of a team move to the same division, they stay in a team. If some of the members
        move to a new division, only the members that stay in the original division will stay in a team.
        """

        links = self.filter_eventlinks()
        links.exclude(division=self).update(division=self)

        if not self.event.is_team:
            return

        teams = set((x.team.id for x in links if x.team is not None))
        teams = EventLink.objects.annotate(
            min_div=Min('eventlink__division__pk'),
            max_div=Max('eventlink__division__pk'),
            count=Count('eventlink')).filter(id__in=teams)

        # If everyone moved to the division, update the team
        teams.filter(min_div=self.id, max_div=self.id).update(division=self)

        # Split teams
        split_teams = teams.exclude(division=self)
        links.filter(team__in=split_teams).update(team=None)
Example #14
 def times_medical(self, prog):
     return list(
         Record.objects.filter(
             program=prog,
             event__in=('med', 'med_bypass')).values('user').annotate(
                 Min('time')).order_by('time__min').values_list('time__min',
                                                                flat=True))
Example #15
def member_contrib(request):
    """
    View function URL : '/accounting/contributions/'
    """
    from_date = JournalEntry.objects.all().aggregate(date=Min("date"))["date"]
    if from_date is None: from_date = datetime.utcfromtimestamp(0)
    to_date = JournalEntry.objects.all().aggregate(date=Max("date"))["date"]
    if to_date is None: to_date = timezone.now()

    query = JournalEntry.objects.filter(type__in=OPERATION_TYPES,
                                        date__gte=from_date,
                                        date__lte=to_date)
    total_contribs = query.aggregate(sum=Sum('amount'))['sum']
    data = {
        'scan_date': UpdateDate.get_latest(JournalEntry),
        'from_date': datetime.strftime(from_date, DATE_PATTERN),
        'to_date': datetime.strftime(to_date, DATE_PATTERN),
        'total_contribs': total_contribs,
        'datatables_defaults': DATATABLES_DEFAULTS,
        'member_contrib_columns': MEMBER_CONTRIB_COLUMNS,
        'system_contrib_columns': SYSTEM_CONTRIB_COLUMNS,
        'player_contrib_columns': PLAYER_CONTRIB_COLUMNS,
        'member_ajax_url': '/accounting/contributions/members/data/',
        'system_ajax_url': '/accounting/contributions/systems/data/',
        'player_ajax_url': '/accounting/contributions/players/data/',
        'sorting': [[1, 'desc']],
    }
    return render_to_response("ecm/accounting/contrib.html", data,
                              Ctx(request))
Example #16
def allSignatures(request):
    entries = Bucket.objects.annotate(
        size=Count('crashentry'), quality=Min('crashentry__testcase__quality'))
    return render(request, 'signatures/index.html', {
        'isAll': True,
        'siglist': entries
    })
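
One caveat worth flagging for annotations like these: when a queryset combines annotations that traverse multi-valued relations, the underlying joins can multiply rows and inflate Count. Here both annotations follow the same crashentry join, but the defensive form is distinct=True; a sketch of that variant, under the assumption the models match the example:

from django.db.models import Count, Min

entries = Bucket.objects.annotate(
    # distinct=True guards the count against row multiplication if another
    # multi-valued join is ever combined with this one.
    size=Count('crashentry', distinct=True),
    quality=Min('crashentry__testcase__quality'),
)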
Example #17
def plot_page(request):
    max_days = timedelta(days=15)
    default_date_range = timedelta(days=1)

    max_end_time = ceil_datetime(timezone.localtime(timezone.now()),
                                 DATE_SLIDER_STEP)
    default_end_time = max_end_time

    first_record_time = GPSLogNew.objects.aggregate(
        first_time=Min('datetime')).get('first_time', None)
    if first_record_time:
        max_start_time = floor_datetime(timezone.localtime(first_record_time),
                                        DATE_SLIDER_STEP)
        if (max_end_time - max_start_time) > max_days:
            max_start_time = max_end_time - max_days
    else:
        max_start_time = max_end_time - max_days
    default_start_time = default_end_time - default_date_range

    data = {
        'device_ids': GPSLogNew.objects.values_list('device_id',
                                                    flat=True).distinct(),
        'default_start_time': default_start_time.strftime(DATE_SLIDER_TIME_FORMAT),
        'default_end_time': default_end_time.strftime(DATE_SLIDER_TIME_FORMAT),
        'max_start_time': max_start_time.strftime(DATE_SLIDER_TIME_FORMAT),
        'max_end_time': max_end_time.strftime(DATE_SLIDER_TIME_FORMAT),
        'step': DATE_SLIDER_STEP,
    }
    return render(request, 'plot.html', data)
Example #18
    def test_case_results(self, test_case):
        # type: (TestCase) -> dict
        start = test_case.result_test_case.earliest('x_created').x_created
        end = test_case.result_test_case.latest('x_created').x_created

        delta = end - start  # timedelta

        days = tuple((start + timedelta(days=i)).date() for i in range(delta.days + 1))[-10:]
        teams = tuple(unique(
            test_case.result_test_case.values_list(
                'author__team__leader_login',
                flat=True
            ).order_by('author__team__leader_login')
        ))
        data = [
            ['Date', *teams],
            *(
                [day.strftime('%d. %m.'), *(
                    test_case.result_test_case.filter(
                        x_created__date=day,
                        author__team__leader_login=login
                    ).aggregate(min=Min(F('operand_price') + F('instruction_price'))).get('min') for login in teams
                )] for day in days
            )
        ]

        return dict(
            days=days,
            teams=teams,
            data=data
        )
Example #19
def SendEmailToFirstPersonInQueue(officeId,WaitingList):
  
  lowestSpot = Waiting.objects.all().filter(office = officeId).aggregate(Min("spot"))
  waitEntry = Waiting.objects.get(office = officeId,spot=lowestSpot['spot__min'])
  waiterSerializier = WaitingSerializer(waitEntry)
 
  personToGetMail = Person.objects.get(id = waiterSerializier.data['person'])
  persSerializier = PersonSerializer(personToGetMail)
  userToGetMail = User.objects.get(id=persSerializier.data['user'])
  userSerializer = UserFullSerializer(userToGetMail)
  
  
  html_content = '<p>Accesați linkul atașat în scopul programării în centrul ales la înscrierea în lista de așteptare <a href="http://localhost:8080/office/'+str(persSerializier.data['id'])+'/'+ str(officeId)+'">\n Click aici pentru a vă programa!</p>'
  data = {}
  data['email_subject'] = 'Loc vaccinare disponibil'
  # data["email_body"] = ' Accesați linkul atașat în scopul programării în centrul ales la înscrierea în lista de așteptare \n http://localhost:8080/office/'+str(officeId)+'/'+ str(persSerializier.data['id'])
  data["email_body"]=html_content
  data['to_email'] = userSerializer.data['email']
  try :
    Util.send_email(data)
  except Exception as e :
    return e

  waitEntry.delete()
  if WaitingList > 1 :
    Waiting.objects.filter(office=officeId,spot__gte=lowestSpot['spot__min']).update(spot=F('spot')-1)
Example #20
def _get_efficiency_statistics(model, owner, field="efficiency", count=False, worth=False, worth_field='value'):
    eff_values = {}
    eff_map = {
        "efficiency__avg": "Efficiency (Average)",
        "efficiency__stddev": "Efficiency (Standard Deviation)",
        "efficiency__min": "Efficiency (Minimum)",
        "efficiency__perc25": "Efficiency (25th Percentile)",
        "efficiency__median": "Efficiency (Median)",
        "efficiency__perc75": "Efficiency (75th Percentile)",
        "efficiency__max": "Efficiency (Maximum)",
        "efficiency__count": "Count",
        "value__sum": "Worth",
    }
    aggregations = [
        Avg(field),
        StdDev(field),
        Min(field),
        Perc25(field),
        Median(field),
        Perc75(field),
        Max(field)
    ]
    if worth:
        aggregations.insert(0, Sum(worth_field))
    if count:
        aggregations.insert(0, Count(field))

    efficiencies = model.objects.filter(owner=owner).aggregate(*aggregations)
    for eff_key, eff_val in efficiencies.items():
        eff_values[eff_map[eff_key]] = round(eff_val or 0, 2)

    return eff_values
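
Perc25, Median, and Perc75 are not Django built-ins, so this example must define them somewhere; the aliases in eff_map (efficiency__perc25 and friends) suggest custom Aggregate subclasses. One common way to implement them on PostgreSQL is PERCENTILE_CONT ... WITHIN GROUP; a sketch of how they might look, not necessarily how the source defines them:

from django.db.models import Aggregate, FloatField

class Median(Aggregate):
    # Continuous median via PostgreSQL's ordered-set aggregate.
    name = 'Median'
    function = 'PERCENTILE_CONT'
    output_field = FloatField()
    template = '%(function)s(0.5) WITHIN GROUP (ORDER BY %(expressions)s)'

class Perc25(Median):
    name = 'Perc25'
    template = '%(function)s(0.25) WITHIN GROUP (ORDER BY %(expressions)s)'

class Perc75(Median):
    name = 'Perc75'
    template = '%(function)s(0.75) WITHIN GROUP (ORDER BY %(expressions)s)'

With name set on each subclass, the default aliases come out as efficiency__median, efficiency__perc25, and efficiency__perc75, which is exactly what eff_map expects.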
Example #21
def hits_per_interval(request, days=1):
    """
	hits per day to facebook.html or
	"""
    context = {}
    rows = []
    logs = Log.objects.order_by('time').all()
    # should include the timezone info in this
    dates = Log.objects.aggregate(Max('time'), Min('time'))
    min_date = dates['time__min'].date()
    max_date = dates['time__max'].date()
    dates = date_range(min_date, max_date, days)

    for from_date, to_date in pair_inter(dates):
        count = Log.objects.filter(
            Q(time__gte=from_date) & Q(time__lt=to_date)
            & (Q(request__startswith='GET /facebook.htm')
               | Q(request__startswith='GET /fb.htm'))).count()
        row = Object()
        row.date = from_date
        row.hits = count
        rows.append(row)

    context['rows'] = rows
    context['use_tabs'] = request.GET.get('use_tabs') in ['1', 'true', 'True']
    return render_to_response('analytics/hits.html', context, mimetype='text')
Example #22
    def handle(self, filename, **options):

        with ZipFile(filename, 'w') as zipFile:
            for bucket in Bucket.objects.annotate(size=Count('crashentry'),
                                                  quality=Min('crashentry__testcase__quality')):
                bestEntryQuery = (CrashEntry.objects
                                  .filter(bucket_id=bucket.pk)
                                  .filter(testcase__quality=bucket.quality)
                                  .defer('rawCrashData', 'rawStderr', 'rawStdout')
                                  .order_by('testcase__size', '-id'))
                if bestEntryQuery.count():
                    bucket.bestEntry = bestEntryQuery[0]
                else:
                    bucket.bestEntry = None

                metadata = {}
                metadata['size'] = bucket.size
                metadata['shortDescription'] = bucket.shortDescription
                metadata['frequent'] = bucket.frequent
                if bucket.bug is not None:
                    metadata['bug__id'] = bucket.bug.externalId

                if bucket.bestEntry is not None and bucket.bestEntry.testcase is not None:
                    metadata['testcase__quality'] = bucket.bestEntry.testcase.quality
                    metadata['testcase__size'] = bucket.bestEntry.testcase.size

                sigFileName = "%d.signature" % bucket.pk
                metaFileName = "%d.metadata" % bucket.pk

                zipFile.writestr(sigFileName, bucket.signature)
                zipFile.writestr(metaFileName, json.dumps(metadata, indent=4))
Example #23
def _calculate_SLA(offset, service):
    UTC_NOW = timezone.now()
    LOCAL_NOW = timezone.localtime(UTC_NOW)
    TIME_OFFSET = LOCAL_NOW.utcoffset()

    utc_start_time = timezone.now().replace(
        hour=0, minute=0, second=0,
        microsecond=0) - datetime.timedelta(days=offset)
    utc_stop_time = timezone.now().replace(
        hour=0, minute=0, second=0,
        microsecond=0) - datetime.timedelta(days=(offset - 1))
    local_start_time = utc_start_time - TIME_OFFSET
    local_stop_time = utc_stop_time - TIME_OFFSET

    # Retrieve history. It takes into account the situation where one agent reports
    # a failure while at least one other agent reports success.

    service_history = ServiceHistory.objects.values('created').\
        filter(service_id=service.id).\
        filter(created__gt=local_start_time).\
        filter(created__lt=local_stop_time).\
        filter(response_state__lt=5).\
        annotate(response_state=Min('response_state')).\
        order_by('created', 'response_state')

    aggregated_failing_time_sec = 0
    break_found = False
    number_of_tries = 0
    _start_time = local_start_time

    for s_history in service_history:
        number_of_tries += 1
        if s_history['response_state'] != 1:
            break_found = True
            diff_time = s_history['created'] - _start_time
            aggregated_failing_time_sec += diff_time.total_seconds()
        else:
            # If the previous check reported the service down or a performance issue,
            # count the time until the next successful check as a service break.
            if break_found:
                diff_time = s_history['created'] - _start_time
                aggregated_failing_time_sec += diff_time.total_seconds()
            break_found = False
        _start_time = s_history['created']

    # If there is at least one record in services_servicehistory for the selected
    # service, calculate the SLA; if the day has no records, assume the SLA cannot
    # be calculated.
    if number_of_tries > 0:
        # include last period: difference between last entry in servicehistory and 00:00:00 UTC next day
        if break_found:
            diff_time = local_stop_time - _start_time
            aggregated_failing_time_sec += diff_time.total_seconds()

        sla_value = (100 -
                     (aggregated_failing_time_sec / ONE_DAY_SECONDS * 100))
        sla_daily = SlaDaily(service_id=service.id,
                             day=utc_start_time,
                             sla=sla_value)
        sla_daily.save()
Example #24
 def handle(self, *args, **options):
     sites = SiteRegistration.objects.prefetch_related('sensors').all()
     for site in sites:
         self.update_sensors_activation_date(site)
         min_datetime = site.sensors.aggregate(
             first_light=Min('activation_date'))
         site.deployment_date = min_datetime['first_light']
         site.save(update_fields=['deployment_date'])
Example #25
def artistsIndex(request):
    if "artist" in request.GET:
        return HttpResponseRedirect(
            reverse(artistPage, kwargs={"artist_id": request.GET["artist"]}))
    artists = Artist.objects.annotate(sort=Min(
        "album__releases__torrents__added")).all().order_by('-sort')[:5]
    return render_to_response('music/artists/index.html', {'artists': artists},
                              context_instance=RequestContext(request))
Example #26
 def test_aggregates(self):
     self.assertEqual(repr(Avg('a')), "Avg(F(a))")
     self.assertEqual(repr(Count('a')), "Count(F(a), distinct=False)")
     self.assertEqual(repr(Max('a')), "Max(F(a))")
     self.assertEqual(repr(Min('a')), "Min(F(a))")
     self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)")
     self.assertEqual(repr(Sum('a')), "Sum(F(a))")
     self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)")
Example #27
def admin_rounds(request):
    rounds = Round.objects.order_by('id')
    message = ''
    set_active = request.GET.get('set_active', '0')
    try:
        set_active_id = int(set_active)
    except ValueError:
        # Not a valid integer; fall back to 0 (no round selected).
        set_active_id = 0
    set_inactive = request.GET.get('set_inactive', '0')
    try:
        set_inactive_id = int(set_inactive)
    except ValueError:
        # Not a valid integer; fall back to 0 (no round selected).
        set_inactive_id = 0
    if set_active_id != 0:
        should_be_active_round = get_object_or_404(Round,
                                                   pk=int(set_active_id))
        should_be_active_round.active = True
        should_be_active_round.save()
        message = _('Round "%s" set as active') % should_be_active_round.name
        messages.add_message(request, messages.INFO, message)

        if settings.SEND_MAIL:
            all_players = Player.objects.\
                exclude(user__email='').filter(user__is_active=True).filter(send_mail_new_round=True)
            start_time = Match.objects.\
                filter(round=should_be_active_round).aggregate(Min('start_time'))['start_time__min']
            logger.info('Sending mail that round %s is active to %d players',
                        should_be_active_round.name, len(all_players))
            for player in all_players:
                with translation.override(player.language):
                    subject = _('[sharkz.bet] New round "%s" available'
                                ) % should_be_active_round.name
                    template = loader.get_template('mail/round_active.html')
                    message_text = template.render({
                        'round': should_be_active_round,
                        'start_time': start_time
                    })
                msg = EmailMessage(subject,
                                   message_text,
                                   '*****@*****.**',
                                   to=[
                                       player.user.email,
                                   ])
                msg.content_subtype = 'html'
                msg.send(fail_silently=False)

    elif set_inactive_id != 0:
        should_be_inactive_round = get_object_or_404(Round,
                                                     pk=int(set_inactive_id))
        should_be_inactive_round.active = False
        should_be_inactive_round.save()
        message = _(
            'Round "%s" set as inactive') % should_be_inactive_round.name
        messages.add_message(request, messages.INFO, message)

    return render(request, 'admin_rounds.html', {'rounds': rounds})
Example #28
    def get_context_data(self, **kwargs):
        kwargs['responses'] = self.responses
        kwargs['detailed_comments'] = self.get_detailed_comments()
        kwargs['feedback_by_service'] = self.get_feedback_by_service()
        kwargs['question_labels'] = [q.question_label for q in self.questions]

        if not (self.start_date and self.end_date) and self.responses:
            min_date = self.responses.aggregate(
                Min('datetime'))['datetime__min']
            kwargs['min_date'] = get_week_start(min_date)
            kwargs['max_date'] = timezone.now()
        else:
            kwargs['min_date'] = self.start_date
            kwargs['max_date'] = (self.end_date -
                                  timedelta(1)) if self.end_date else None

        # Feedback stats for chart
        feedback_stats = self.get_feedback_statistics(
            self.lga_clinics,
            start_date=self.start_date,
            end_date=self.end_date)
        kwargs['feedback_stats'] = feedback_stats
        kwargs['max_chart_value'] = max(feedback_stats['sent'])
        kwargs['feedback_clinics'] = self.format_chart_labels(self.lga_clinics)

        # Patient feedback responses
        other_clinics = self.lga_clinics.exclude(pk=self.object.pk)
        current_clinic_stats = self.get_response_statistics(
            (self.object, ), self.questions, self.start_date, self.end_date)
        other_stats = self.get_response_statistics(other_clinics,
                                                   self.questions,
                                                   self.start_date,
                                                   self.end_date)
        margins = [(x[1] - y[1])
                   for x, y in zip(current_clinic_stats, other_stats)]
        kwargs['response_stats'] = zip(self.questions, current_clinic_stats,
                                       other_stats, margins)

        num_registered = self.visits.count()
        num_started = self.visits.filter(survey_started=True).count()
        num_completed = self.visits.filter(survey_completed=True).count()

        if num_registered:
            percent_started = make_percentage(num_started, num_registered)
            percent_completed = make_percentage(num_completed, num_registered)
        else:
            percent_completed = None
            percent_started = None

        kwargs['num_registered'] = num_registered
        kwargs['num_started'] = num_started
        kwargs['percent_started'] = percent_started
        kwargs['num_completed'] = num_completed
        kwargs['percent_completed'] = percent_completed

        # TODO - participation rank amongst other clinics.
        return super(ClinicReport, self).get_context_data(**kwargs)
Example #29
 def get(self, request):
     if request.user.is_authenticated():
         histories = OnlineHistory.objects.filter(mac=request.user.username)\
             .values('date').annotate(min_time=Min('time'), max_time=Max('time')).order_by("-date")
         return render(request, "index.html", locals())
     else:
         verification_token = str(uuid.uuid4())
         request.session["verification_token"] = verification_token
         return render(request, "authentication.html", locals())
Example #30
    def get_registrations(self, interval, days, graph_key, select_box_value):
        """ Returns an array with new users count per interval."""
        try:
            conf_data = DashboardStats.objects.get(graph_key=graph_key)
            model_name = apps.get_model(conf_data.model_app_name,
                                        conf_data.model_name)
            kwargs = {}
            for i in conf_data.criteria.all():
                # fixed mapping value passed into kwargs
                if i.criteria_fix_mapping:
                    for key in i.criteria_fix_mapping:
                        # value => i.criteria_fix_mapping[key]
                        kwargs[key] = i.criteria_fix_mapping[key]

                # dynamic mapping value passed into kwargs
                if i.dynamic_criteria_field_name and select_box_value:
                    kwargs[i.dynamic_criteria_field_name] = select_box_value

            aggregate = None
            if conf_data.type_operation_field_name and conf_data.operation_field_name:
                operation = {
                    'Count': Count(conf_data.operation_field_name),
                    'Sum': Sum(conf_data.operation_field_name),
                    'Avg': Avg(conf_data.operation_field_name),
                    'StdDev': StdDev(conf_data.operation_field_name),
                    'Max': Max(conf_data.operation_field_name),
                    'Min': Min(conf_data.operation_field_name),
                    'Variance': Variance(conf_data.operation_field_name),
                }
                aggregate = operation[conf_data.type_operation_field_name]

            stats = QuerySetStats(model_name.objects.filter(**kwargs),
                                  conf_data.date_field_name, aggregate)
            # stats = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1),
                                         interval)

            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1),
                                     interval)
        except (LookupError, FieldError, TypeError) as e:
            self.error_message = str(e)
            User = get_user_model()
            stats = QuerySetStats(User.objects.filter(is_active=True),
                                  'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1),
                                         interval)
            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1),
                                     interval)