def get_per_page_report(self):
    """Build a per-(participant, page) report of focus/unfocus durations.

    Only events with a paired `entry` row are considered. Annotates:
      - focused_time: summed (timestamp - entry.timestamp) spans for events
        whose type is in `focus_exit_codes`
      - unfocused_time: same span summed for types in `focus_enter_codes`
      - num_unfocus: count of explicit FOCUS_OFF events
      - total_time: summed span over all paired events

    :return: the annotated values() queryset.
    """
    q = super().get_queryset().values('participant', 'app_name', 'page_name', 'player_id',
                                      'participant__code', 'participant__session__code',
                                      'round_number'). \
        filter(entry__isnull=False). \
        annotate(
            # Span from the paired entry event up to a focus-exit event.
            focused_time=Sum(Case(
                When(event_num_type__in=focus_exit_codes,
                     then=ExpressionWrapper(F('timestamp') - F('entry__timestamp'),
                                            output_field=DurationField())),
                output_field=DurationField(),
            )),
            # Span from the paired entry event up to a focus-(re)enter event.
            unfocused_time=Sum(Case(
                When(event_num_type__in=focus_enter_codes,
                     then=ExpressionWrapper(F('timestamp') - F('entry__timestamp'),
                                            output_field=DurationField())),
                output_field=DurationField(),
            )),
            # Count only the explicit FOCUS_OFF event type.
            num_unfocus=Count(
                Case(When(event_num_type=FocusExitEventTypes.FOCUS_OFF.value, then=1),
                     output_field=IntegerField())),
        ).annotate(total_time=Sum(ExpressionWrapper(F('timestamp') - F('entry__timestamp'),
                                                    output_field=DurationField())))
    return q
def annotate_hours(query):
    """
    Annotates given QuerySet with controlling hours for the current
    (curr_hours), previous (prev_hours), and penultimate (prev_prev_hours)
    months.

    Each annotation is Coalesced to a zero duration so callers never see
    NULL for months without sessions.
    """
    # Read the clock once: the original called timezone.now() separately for
    # month and year, which could disagree if run across a year boundary.
    now = timezone.now()
    MONTH_NOW = now.month
    YEAR_NOW = now.year
    CURR_MONTH = (Q(sessions__start__month=MONTH_NOW)
                  & Q(sessions__start__year=YEAR_NOW))
    # January wraps to December of the previous year.
    PREV_MONTH = (
        Q(sessions__start__month=MONTH_NOW - 1 if MONTH_NOW > 1 else 12)
        & Q(sessions__start__year=YEAR_NOW if MONTH_NOW > 1 else YEAR_NOW - 1))
    # January/February wrap to November/December of the previous year.
    PREV_PREV_MONTH = (
        Q(sessions__start__month=MONTH_NOW - 2 if MONTH_NOW > 2 else
          12 if MONTH_NOW > 1 else 11)
        & Q(sessions__start__year=YEAR_NOW if MONTH_NOW > 2 else YEAR_NOW - 1))
    return query.annotate(
        curr_hours=Coalesce(Sum('sessions__duration', filter=CURR_MONTH),
                            Cast(timedelta(), DurationField())),
        prev_hours=Coalesce(Sum('sessions__duration', filter=PREV_MONTH),
                            Cast(timedelta(), DurationField())),
        prev_prev_hours=Coalesce(
            Sum('sessions__duration', filter=PREV_PREV_MONTH),
            Cast(timedelta(), DurationField())),
    )
def _get_tracked_time(self, player_id, participant_id, page_name, focus_type):
    """Aggregate tracked time for one player on one page.

    :param focus_type: iterable of event type codes whose entry→event spans
        are summed into `diff`.
    :return: dict with `diff` (summed duration, or None when no rows match)
        and `num_unfocus` (count of FOCUS_OFF events).
    """
    tot_exits = super().get_queryset().values('participant', 'page_name', 'player_id'). \
        filter(player_id=player_id,
               participant_id=participant_id,
               page_name=page_name,
               entry__isnull=False,
               ). \
        aggregate(
            diff=Sum(Case(
                When(event_num_type__in=focus_type,
                     then=ExpressionWrapper(F('timestamp') - F('entry__timestamp'),
                                            output_field=DurationField())),
                output_field=DurationField(),
            )),
            # Use the enum's integer value (as the per-page report does);
            # the previous hard-coded `2` hid the event's meaning.
            num_unfocus=Count(Case(
                When(event_num_type=FocusExitEventTypes.FOCUS_OFF.value, then=1),
                output_field=IntegerField(),
            ))
        )
    return tot_exits
def __init__(self, user_pk, foia_pks):
    """Prepare a CSV export queryset for the given FOIA requests.

    :param user_pk: pk of the user the export is for
    :param foia_pks: sequence of FOIARequest pks to export
    """
    # NOTE(review): the pks are joined with no separator, so [1, 23] and
    # [12, 3] both yield '123' — presumably this string is only an opaque
    # identifier for the parent task; confirm a delimiter isn't needed.
    super(ExportCsv, self).__init__(user_pk, ''.join(str(pk) for pk in foia_pks[:100]))
    # Pre-fetch related rows and restrict loaded columns to what the CSV
    # actually emits.
    self.foias = (FOIARequest.objects.filter(
        pk__in=foia_pks).select_related(
            'composer__user',
            'agency__jurisdiction__parent',
        ).only(
            'composer__user__username',
            'title',
            'status',
            'slug',
            'agency__jurisdiction__name',
            'agency__jurisdiction__slug',
            'agency__jurisdiction__id',
            'agency__jurisdiction__parent__name',
            'agency__jurisdiction__parent__id',
            'agency__name',
            'agency__id',
            'date_followup',
            'date_estimate',
            'embargo',
            'composer__requested_docs',
        ).annotate(
            # Whole days elapsed since submission / last update.
            days_since_submitted=ExtractDay(
                Cast(Now() - F('composer__datetime_submitted'), DurationField())),
            days_since_updated=ExtractDay(
                Cast(Now() - F('datetime_updated'), DurationField())),
            # Comma-joined distinct project / tag names.
            project_names=StringAgg('projects__title', ',', distinct=True),
            tag_names=StringAgg('tags__name', ',', distinct=True),
        ))
def test_subtract_temporals(self):
    """A backend without temporal subtraction must raise NotSupportedError."""
    internal_type = DurationField().get_internal_type()
    expected_msg = ('This backend does not support %s subtraction.' %
                    internal_type)
    with self.assertRaisesMessage(NotSupportedError, expected_msg):
        self.ops.subtract_temporals(internal_type, None, None)
class Moment(Model):
    """One captured moment of a game, pinned to both wall-clock and game time."""
    game = ForeignKey(Game, on_delete=CASCADE)
    real_timestamp = DateTimeField()  # wall-clock time of the moment
    quarter = IntegerField()
    game_clock = DurationField()  # time remaining on the game clock
    shot_clock = DurationField(null=True)  # null, e.g. when no shot clock applies

    class Meta:
        db_table = 'moment'
def test_subtract_temporals(self):
    """subtract_temporals() raises NotSupportedError on this backend."""
    field_type = DurationField().get_internal_type()
    msg = 'This backend does not support %s subtraction.' % field_type
    with self.assertRaisesMessage(NotSupportedError, msg):
        self.ops.subtract_temporals(field_type, None, None)
def duration(*args, **kwargs):
    """Build a DurationField whose extracted meta is tagged type='duration'."""
    field_meta = _kwargs_(kwargs)
    field_meta['type'] = 'duration'
    duration_field = DurationField(*args, **kwargs)
    duration_field.meta = field_meta
    return duration_field
def stats(cls, events=None):
    '''
    Returns stats on the events queryset provided.

    :param events: A queryset of events, that have the fields sessions, games, players
    :return: dict of min/avg/median/max aggregates (with the duration and
        gap medians converted back to timedeltas), or None when the
        queryset is empty.
    '''
    if events is None:
        events = cls.implicit()

    if events:
        # Tailwind's Median aggregator does not work on Durations (PostgreSQL Intervals)
        # So we have to convert it to Epoch time. Extract is a Django method that can extract
        # 'epoch' which is the documented method of casting a PostgreSQL interval to epoch time.
        # https://www.postgresql.org/message-id/19495.1059687790%40sss.pgh.pa.us
        # Django does not document 'epoch' alas but it works:
        # https://docs.djangoproject.com/en/4.0/ref/models/database-functions/#extract
        # We need a Django ExpressionWrapper to cast the duration field to DurationField as
        # for some reason even though it's a PostgreSQL interval, Django still thinks of it
        # as a DateTimeField (from the difference of two DateTimeFields I guess, a bug/feature
        # that fails to recast a difference of DateTimeFields as a DurationField).
        epoch_duration = Extract(ExpressionWrapper(F('duration'), output_field=DurationField()),
                                 lookup_name='epoch')
        epoch_gap = Extract(ExpressionWrapper(F('gap_time'), output_field=DurationField()),
                            lookup_name='epoch')

        result = events.aggregate(Min('sessions'), Avg('sessions'), Median('sessions'), Max('sessions'),
                                  Min('games'), Avg('games'), Median('games'), Max('games'),
                                  Min('players'), Avg('players'), Median('players'), Max('players'),
                                  duration__min=Min('duration'), duration__avg=Avg('duration'),
                                  duration__median=Median(epoch_duration), duration__max=Max('duration'),
                                  gap__min=Min('gap_time'), gap__avg=Avg('gap_time'),
                                  gap__median=Median(epoch_gap), gap__max=Max('gap_time'))

        # aggregate() is a QuerySet endpoint (it evaluates the query and returns
        # a plain dict), so we can cast the epoch medians back to Durations here
        # for the consumer.
        result['duration__median'] = timedelta(seconds=result['duration__median'])
        result['gap__median'] = timedelta(seconds=result['gap__median'])
    else:
        result = None
    return result
def get_queryset(self):
    """Annotate rows with range bounds, an is_current flag, and a duration
    that runs to Now() for still-open ranges."""
    running_duration = ExpressionWrapper(
        Now() - F('start_time'), output_field=DurationField())
    finished_duration = ExpressionWrapper(
        F('end_time') - F('start_time'), output_field=DurationField())
    base = super().get_queryset()
    return base.annotate(
        start_time=Lower('time_range'),
        end_time=Upper('time_range'),
        # Open-ended ranges (no upper bound) are the "current" ones.
        is_current=Case(
            When(end_time__isnull=True, then=True),
            default=False,
            output_field=BooleanField(),
        ),
        duration=Case(
            When(is_current=True, then=running_duration),
            default=finished_duration,
        ),
    )
def get(self, request):
    """Driver/bus statistics endpoint.

    Returns bus counts per type, each driver's age and experience (as
    durations), the fleet-wide averages, and the distinct routes served by
    each bus type (1-5).
    """
    # Number of buses per type (bus id 4 excluded).
    b_types = Bus.objects.exclude(id=4).values('type').annotate(
        Count("id")).order_by('type')

    # Shared annotations: age and days of experience as date differences.
    age_exp_annotations = dict(
        age=ExpressionWrapper(datetime.date.today() - F('d_of_b'),
                              output_field=DurationField()),
        exp_days=ExpressionWrapper(datetime.date.today() - F('date_begin'),
                                   output_field=DurationField()))
    age_exp = Driver.objects.annotate(**age_exp_annotations).values(
        'age', 'exp_days', 'surname', 'name', 'passport')
    avg_age_exp = Driver.objects.annotate(**age_exp_annotations).values(
        'age', 'exp_days').aggregate(Avg('age'), Avg('exp_days'))

    # Distinct routes per bus type; replaces five copy-pasted queries.
    routes_by_type = {
        "r_for_t%d" % bus_type: Driver.objects.filter(
            bus__type=bus_type).values('route').distinct()
        for bus_type in range(1, 6)
    }

    return Response({
        "avg_age_exp": avg_age_exp,
        "age_exp": age_exp,
        **routes_by_type,
        "bus_types": b_types,
    })
def statistic(request, username=''):
    """View for statistic.

    Shows the current month's total activity time, the "Работа" (work)
    portion, the remainder, and each as a percentage. Renders an empty
    statistic page for other users or when no activities exist.
    """
    import calendar

    if request.user.username == username:
        now = datetime.datetime.now()
        cur_year = now.year
        cur_month = now.month
        start = now.replace(year=cur_year, month=cur_month, day=1)
        # Use the real last day of the month: the previous hard-coded day=30
        # raised ValueError every February and missed the 31st elsewhere.
        last_day = calendar.monthrange(cur_year, cur_month)[1]
        end = now.replace(year=cur_year, month=cur_month, day=last_day)
        # NOTE(review): exclude(add_date__lte=start, add_date__gte=end)
        # excludes rows that are before `start` AND after `end` at once,
        # which matches nothing — presumably the intent was to restrict to
        # the current month; confirm before changing the query semantics.
        all_duration = Activities.objects.filter(new=request.user).exclude(
            add_date__lte=start, add_date__gte=end).aggregate(
                sum=Sum('activities_duration', output_field=DurationField()))
        work_duration = Activities.objects.filter(
            new=request.user, activities_type="Работа").exclude(
                add_date__lte=start, add_date__gte=end).aggregate(sum=Sum(
                    'activities_duration', output_field=DurationField()))
        other_duration = Activities.objects.filter(new=request.user).exclude(
            activities_type="Работа").exclude(
                add_date__lte=start, add_date__gte=end).aggregate(sum=Sum(
                    'activities_duration', output_field=DurationField()))
        if all_duration['sum'] is None:
            all_duration['sum'] = datetime.timedelta(0)
            args = {
                'sorry': 'На данный момент вы не добавили ни одной активности'
            }
        else:
            if work_duration['sum'] is None:
                work_duration['sum'] = datetime.timedelta(0)
            if other_duration['sum'] is None:
                other_duration['sum'] = datetime.timedelta(0)
            percent_of_work_duration = work_duration['sum'] / all_duration[
                'sum'] * 100
            percent_of_other_duration = other_duration['sum'] / all_duration[
                'sum'] * 100
            args = {
                'sum_duration': all_duration['sum'],
                'work_duration': work_duration['sum'],
                'other_duration': other_duration['sum'],
                'percent_of_work_duration': round(percent_of_work_duration, 2),
                'percent_of_other_duration': round(percent_of_other_duration, 2)
            }
        return render_to_response('statistic.html', args,
                                  context_instance=RequestContext(request))
    return render_to_response('statistic.html',
                              context_instance=RequestContext(request))
def get_yougest_and_oldest_movie_titles():
    """Return the titles of the five movies at each extreme of average cast age.

    NOTE(review): `age` is `birth_date - today()`, a signed (negative)
    duration, so ascending `average_age` puts the most negative (oldest)
    casts first — verify the youngest/oldest key mapping is not inverted
    before relying on it. Behavior is kept exactly as before.
    """
    def _movies_by_average_cast_age(ordering):
        # Shared pipeline (was duplicated): annotate each cast row with a
        # signed age duration, average per movie, order, take five titles.
        return MovieCast.objects.annotate(age=ExpressionWrapper(
            F('cast__birth_date') - date.today(),
            output_field=DurationField())).values('movie_id').annotate(
                average_age=Avg('age')).order_by(ordering).values(
                    'movie__title')[:5]

    return {
        'yougest_movie_title': _movies_by_average_cast_age('average_age'),
        'oldest_movie_titles': _movies_by_average_cast_age('-average_age'),
    }
class ModelEvent(BaseModel):
    """
    Used to record model entry events, like modification,
    removal or adding of values or relationships.
    """
    # Operation code constrained to [-1, 1]; display values come from
    # DjangoOperations. Nullable for events without a distinct operation.
    operation = SmallIntegerField(
        validators=[MinValueValidator(-1), MaxValueValidator(1)],
        null=True,
        choices=DjangoOperations,
    )
    # User who triggered the event; removed users cascade-delete their events.
    user = ForeignKey(settings.AUTH_USER_MODEL, on_delete=CASCADE, null=True)  # maybe don't cascade?
    entry = ForeignKey(ModelEntry, on_delete=CASCADE)
    # modifications = None  # One2Many -> ModelModification
    # relationships = None  # One2Many -> ModelRelationship
    # v experimental, opt-in (pickled object)
    snapshot = PickledObjectField(null=True)
    # Duration recorded for the event; presumably how long the logged
    # operation took — confirm against the writer of this field.
    performance = DurationField(null=True)

    class Meta:
        verbose_name = "Model Event"
        verbose_name_plural = "Model Events"

    class LoggingIgnore:
        complete = True
def totals(self):
    """Listening hours per stream for each configured date range.

    For every range in self.date_ranges: clip each overlapping listener
    session to the range, sum the clipped lengths per stream, convert the
    sum to hours, and union the per-range rows into one queryset ordered
    by period then stream.
    """
    qs = Listener.objects.none()
    for date_range in self.date_ranges:
        qs = qs.union(
            self.listeners.filter(session__overlap=date_range, ).annotate(
                # Attach the reporting range so it can be intersected below.
                period=Value(date_range, DTRangeField()),
            ).annotate(
                # Intersection of session and period: later of the two
                # starts, earlier of the two ends.
                start=Greatest(RangeStartsWith('period'),
                               RangeStartsWith('session')),
                end=Least(RangeEndsWith('period'), RangeEndsWith('session')),
            ).annotate(length=ExpressionWrapper(
                F('end') - F('start'), output_field=DurationField())).values(
                    self.streams).order_by(self.streams).annotate(
                        # Total clipped time per stream, via epoch seconds,
                        # expressed in hours.
                        hours=ExpressionWrapper(
                            Extract(Sum('length'), 'epoch') / 3600,
                            output_field=FloatField()),
                        period=Value(date_range.lower, DateTimeField()),
                        stream=Cast(self.streams, CharField()),
                    ))
    return qs.order_by(
        'period',
        self.stream_order,
    )
def sessions():
    """Per-client session summary: first/last event time, entry count, and
    total span, ordered newest-first by the earliest event."""
    per_client = LogEvent.objects.values('client')
    summarised = per_client.annotate(
        min=Min('event_time'),
        max=Max('event_time'),
        number_of_log_entries=Count('id'),
    )
    span = ExpressionWrapper(F('max') - F('min'),
                             output_field=DurationField())
    return summarised.annotate(diff=span).order_by('-min')
def do_periodic_moss_check():
    """Schedule MOSS checks for tasks whose deadline passed recently.

    "Recently" means between 30 minutes and 7 days ago; tasks that already
    have a cached result or a pending job are skipped.
    """
    min_time_from_deadline = datetime.timedelta(minutes=30)
    max_time_from_deadline = datetime.timedelta(days=7)

    deadline_age = ExpressionWrapper(Now() - F("deadline"),
                                     output_field=DurationField())
    assignments = (AssignedTask.objects
                   # active tasks that actually have a deadline
                   .filter(clazz__in=Class.objects.current_semester(),
                           deadline__isnull=False)
                   .annotate(date_diff=deadline_age)
                   # keep deadlines that are neither too fresh nor too old
                   .filter(date_diff__gte=min_time_from_deadline,
                           date_diff__lte=max_time_from_deadline))

    cache = caches["default"]
    for task in Task.objects.filter(assignedtask__in=assignments):
        already_handled = (
            cache.get(moss_result_cache_key(task.id)) is not None
            or cache.get(moss_job_cache_key(task.id)) is not None)
        if already_handled:
            continue
        logging.info(f"Scheduling MOSS check for {task.id}")
        enqueue_moss_check(task.id, notify=True)
def append_last_pv_date(self, serializer):
    """Attach last programmatic-visit (PV) info to each serialized row.

    Adds 'last_pv_date' (date of the most recent completed
    programme-monitoring travel by the primary traveler) and 'days_last_pv'
    (whole days since then, or None) to every dict in serializer.data,
    keyed by its 'intervention_id'.
    """
    # NOTE(review): datetime.now() is naive while last_pv_date is a
    # DateTimeField — confirm timezone handling (timezone.now() may be meant).
    delta = ExpressionWrapper(datetime.now() - F('last_pv_date'),
                              output_field=DurationField())
    qs = Intervention.objects.exclude(status=Intervention.DRAFT).annotate(
        last_pv_date=Max(
            Case(When(travel_activities__travel_type=TravelType.
                      PROGRAMME_MONITORING,
                      travel_activities__travels__traveler=F(
                          'travel_activities__primary_traveler'),
                      travel_activities__travels__status=Travel.COMPLETED,
                      then=F('travel_activities__date')),
                 output_field=DateTimeField())))
    qs = qs.annotate(days_since_last_pv=delta)

    # Build a pk -> PV info lookup from one pass over the queryset.
    pv_dates = {}
    for i in qs:
        pv_dates[str(i.pk)] = {
            "last_pv_date": i.last_pv_date,
            "days_last_pv": i.days_since_last_pv.days if i.days_since_last_pv else None,
        }

    # Add last_pv_date
    for d in serializer.data:
        pk = d["intervention_id"]
        d["last_pv_date"] = pv_dates[pk]["last_pv_date"]
        d["days_last_pv"] = pv_dates[pk]["days_last_pv"]
    return serializer
class NoticePeriodViewSet(AtomicTransactionModelViewSet):
    """Endpoints for NoticePeriod, listed shortest duration first."""
    # `duration` is presumably not stored as an interval column; casting to
    # DurationField makes the ordering chronological rather than lexical —
    # confirm against the NoticePeriod model definition.
    queryset = (
        NoticePeriod.objects.all()
        .annotate(duration_as_interval=Cast("duration", DurationField()))
        .order_by("duration_as_interval")
    )
    serializer_class = NoticePeriodSerializer
def get_articles(journal, start_date, end_date):
    """Published articles (optionally for one journal) annotated with their
    editorial turnaround, each carrying view/download access querysets for
    the given reporting window."""
    now = timezone.now()
    turnaround = ExpressionWrapper(
        F('date_published') - F('date_submitted'),
        output_field=DurationField(),
    )
    articles = (sm.Article.objects
                .filter(date_published__lte=now)
                .select_related('section')
                .annotate(editorial_delta=turnaround))
    if journal:
        articles = articles.filter(journal=journal)

    def window_accesses(article, access_type):
        # Access records for this article inside [start_date, end_date].
        return mm.ArticleAccess.objects.filter(
            article=article,
            accessed__gte=start_date,
            accessed__lte=end_date,
            type=access_type)

    for article in articles:
        article.views = window_accesses(article, 'view')
        article.downloads = window_accesses(article, 'download')
    return articles
def __init__(self):
    """Pre-select online missions eligible for a pick order and initialise
    empty picking state."""
    super().__init__()
    self.pickorder_length = None
    # Online missions with EAN-bearing products, no picklist yet, and not
    # excluded from matching or pick ordering.
    self.missions = Mission.objects.filter(
        productmission__sku__product__ean__isnull=False,
        online_picklist__isnull=True,
        is_online=True,
        not_matchable__isnull=True,
        ignore_pickorder__isnull=True)
    # delta = absolute distance between purchase date and now; closest first.
    self.missions = self.missions.annotate(delta=Case(
        When(purchased_date__gte=Now(), then=F('purchased_date') - Now()),
        When(purchased_date__lt=Now(), then=Now() - F('purchased_date')),
        output_field=DurationField())).order_by("delta")
    # Drop DHL shipments, explicitly ignored orders, stale (>20 days) and
    # unmatchable missions.
    self.missions = self.missions.exclude(
        Q(
            Q(online_transport_service__name__iexact="dhl")
            | Q(ignore_pickorder=True))
        | Q(delta__gte=timedelta(days=20))
        | Q(not_matchable=True))
    # Mutable state filled in while the pick order is being built.
    self.picklist_data = None
    self.pickorder = None
    self.missions_products = None
    self.stocks = None
    self.used_stocks = {}
    self.missions_pick_rows = None
    # Stations with no pick order currently assigned.
    self.packing_stations = PackingStation.objects.filter(
        pickorder__isnull=True)
    self.packing_station_current_user = None
    self.refill_order = None
    # NOTE(review): self.pickorder is assigned twice; this second one is
    # redundant.
    self.pickorder = None
    self.limit_result = None
    self.online_prefixes = OnlinePositionPrefix.objects.all()
class Flight(models.Model):
    """A flight between two airports, operated by an airline."""

    class Meta:
        verbose_name = FLIGHT_VERBOSE_NAME
        verbose_name_plural = FLIGHT_VERBOSE_NAME_PLURAL

    number = IntegerField(FLIGHT_NUMBER_KEY, validators=[flight_number_validate])
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0; an
    # upgrade will require explicit on_delete arguments.
    airline = ForeignKey(Airline, verbose_name=FLIGHT_AIRLINE_KEY)
    departure = ForeignKey(Airport, verbose_name=FLIGHT_DEPARTURE_KEY,
                           related_name='airport_dep')
    arrival = ForeignKey(Airport, verbose_name=FLIGHT_ARRIVAL_KEY,
                         related_name='airport_arr')
    duration = DurationField(FLIGHT_DURATION_KEY, )

    def __str__(self):
        # e.g. 'XX-0042 (Origin-Destination)'
        return '{codename} ({dep}-{arr})'.format(codename=self.codename(),
                                                 dep=self.departure.name,
                                                 arr=self.arrival.name)

    def codename(self):
        # Airline code plus the zero-padded 4-digit flight number.
        return '{airline_code}-{flight_code:04}'.format(
            airline_code=self.airline.code,
            flight_code=self.number,
        )
class Click(BaseAttribute):
    """A click recorded against an ad, with its duration."""
    ad = ForeignKey(to=Ad, related_name='clicks', on_delete=CASCADE)
    duration = DurationField()

    def __str__(self):
        # ip and time come from BaseAttribute.
        parts = (str(self.ad.id), ' : ', self.ip, ' - ', str(self.time))
        return ''.join(parts)
class Event(Model):
    """A play-by-play event within a game, with up to three involved players."""
    game = ForeignKey(Game, on_delete=CASCADE)
    eventnum = IntegerField()
    period = IntegerField()
    msg_type = IntegerField()
    msg_action_type = IntegerField()
    ev_real_time = TimeField()  # wall-clock time of the event
    ev_game_clock = DurationField()  # time remaining in the period
    # At most one of the three descriptions is typically set per event.
    home_desc = CharField(max_length=256, null=True)
    neutral_desc = CharField(max_length=256, null=True)
    visitor_desc = CharField(max_length=256, null=True)
    home_score_after = IntegerField(default=0)
    visitor_score_after = IntegerField(default=0)
    home_team_fouls_after = IntegerField(default=0)
    visitor_team_fouls_after = IntegerField(default=0)
    person1_type = IntegerField(null=True)
    player1 = ForeignKey(Player, null=True, related_name='p1', on_delete=CASCADE)
    player1_team = ForeignKey(Team, null=True, related_name='p1_team', on_delete=CASCADE)
    person2_type = IntegerField(null=True)
    player2 = ForeignKey(Player, null=True, related_name='p2', on_delete=CASCADE)
    player2_team = ForeignKey(Team, null=True, related_name='p2_team', on_delete=CASCADE)
    person3_type = IntegerField(null=True)
    player3 = ForeignKey(Player, null=True, related_name='p3', on_delete=CASCADE)
    player3_team = ForeignKey(Team, null=True, related_name='p3_team', on_delete=CASCADE)

    def _joined_desc(self):
        # Comma-join whichever of the three description columns are set;
        # shared by __str__ and desc_for_poss_dropdown (was duplicated).
        return ', '.join([desc for desc in
                          [self.home_desc, self.neutral_desc, self.visitor_desc]
                          if desc is not None])

    def __str__(self):
        return self._joined_desc() + ' with %s left in period %s (#%s)' % (
            self.ev_game_clock, self.period, self.eventnum)

    def desc_for_poss_dropdown(self):
        return self._joined_desc() + ' with %s left in period %s' % (
            self.ev_game_clock, self.period)

    class Meta:
        db_table = 'event'
def get_stale_tasks(self): """Get stale tasks""" # pylint: disable=import-outside-toplevel from muckrock.task.models import ( NewAgencyTask, OrphanTask, FlaggedTask, PortalTask, SnailMailTask, ) stale_tasks = OrderedDict() stale_tasks["Processing Requests"] = (FOIARequest.objects.filter( status="submitted", date_processing__lt=( date.today() - timedelta(5))).order_by("date_processing").annotate( days_old=ExtractDay(Now() - F("date_processing"))))[:5] task_types = [ (NewAgencyTask, 3), (OrphanTask, 5), (FlaggedTask, 5), (PortalTask, 5), (SnailMailTask, 5), ] for task_type, days_old in task_types: stale_tasks[task_type.type] = (task_type.objects.filter( date_created__lt=(timezone.now() - timedelta(days_old)), resolved=False, ).order_by("date_created").annotate(days_old=ExtractDay( Cast(Now() - F("date_created"), DurationField())))[:5]) return stale_tasks
class ToiletLectureQuerySet(models.QuerySet):
    """QuerySet helpers for aggregating visit statistics."""

    # Aggregations applied together by get_summary().
    metrics = {
        'total_time': Sum('total_time'),
        'max_time': Max('total_time'),
        'avg_time': Avg('total_time'),
        'total_visits': Count('id'),
        # Percentile needs an explicit output_field to come back as a duration.
        'median_time': Percentile('total_time', 0.5, output_field=DurationField())
    }

    def by_days(self, days):
        """Rows whose start_at date falls within the last `days` days."""
        today = date.today()
        return self.filter(start_at__date__lte=today,
                           start_at__date__gte=today - timedelta(days=int(days)))

    def group_by_hours(self):
        """Visit count and total time grouped by hour of day.

        NOTE(review): .extra() is deprecated in modern Django — consider
        annotating with ExtractHour('start_at') instead.
        """
        return self.extra({
            'hour': "extract(hour from start_at)"
        }).order_by('hour').values('hour').annotate(
            total_visits=Count('id'), total_time=Sum('total_time'))

    def get_summary(self):
        """Apply every `metrics` aggregation over the current queryset."""
        return self.aggregate(**self.metrics)
def get_tracks(self) -> Tuple[Iterable[Track], int]:
    """Pick five tracks for the requested roulette mode.

    :return: (five tracks from the mode's pool, size of the whole pool)
    """
    qs = self.get_base_queryset()
    if self.kwargs.get('mode') == 'pro':
        qs = self.pro_queryset(qs)
    elif self.kwargs.get('mode') == 'hipster':
        # Tracks never played at all.
        qs = qs.filter(play=None)
    elif self.kwargs.get('mode') == 'almost-100':
        # Played at least once, but not within roughly the last 80 shows.
        qs = qs.exclude(
            play__date__gt=Show.current().end -
            datetime.timedelta(days=(7 * 80)),
        ).exclude(play=None)
    elif self.kwargs.get('mode') == 'decade':
        qs = qs.for_decade(
            int(self.kwargs.get('decade', self.default_decade)))
    elif self.kwargs.get('mode') == 'staple':
        # Staple track: having been played more than once per year(ish)
        # since the track was made available. Exclude tracks that don't
        # yet have enough plays to be reasonably called a "staple".
        qs = (qs.annotate(plays=Count('play')).filter(
            plays__gt=2).annotate(time_per_play=Cast(
                ((Now() - F('revealed')) / F('plays')),
                output_field=DurationField())).filter(
                    time_per_play__lt=parse_duration('365 days')))

        # order_by('?') fails when annotate() has been used
        return (sample(list(qs), 5), qs.count())
    elif self.kwargs.get('mode') == 'short':
        # Tracks within one minute below the requested length.
        length_msec = int(
            self.kwargs.get('minutes',
                            self.default_minutes_count)) * 60 * 1000
        qs = qs.filter(msec__gt=length_msec - 60_000,
                       msec__lte=length_msec)

    return (qs.order_by('?')[:5], qs.count())
class Test(Model):
    """Fixture model exposing one field of most common types."""
    name = CharField(max_length=20)
    owner = ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True,
                       on_delete=CASCADE)
    public = BooleanField(default=False)
    date = DateField(null=True, blank=True)
    datetime = DateTimeField(null=True, blank=True)
    permission = ForeignKey('auth.Permission', null=True, blank=True,
                            on_delete=CASCADE)
    # We can’t use the exact names `float` or `decimal` as database column name
    # since it fails on MySQL.
    a_float = FloatField(null=True, blank=True)
    a_decimal = DecimalField(null=True, blank=True, max_digits=5,
                             decimal_places=2)
    bin = BinaryField(null=True, blank=True)
    ip = GenericIPAddressField(null=True, blank=True)
    duration = DurationField(null=True, blank=True)
    uuid = UUIDField(null=True, blank=True)

    class Meta(object):
        ordering = ('name', )
def peer_cmd():
    """Explore all known peers, update their monitoring records, and prune
    peers offline for 30+ days."""
    logger.info("Start")
    addresses = get_nodes_list()

    # explore every peer and collect updates
    updates = {}
    if settings.TEST_NET:
        # Sequential exploration on the test net.
        for address in addresses:
            explore_node(address, updates)
    else:
        with ThreadPoolExecutor(max_workers=20) as executor:
            executor.map(lambda address: explore_node(address, updates),
                         addresses)

    updates_with_data = tuple(filter(lambda x: x is not None, updates.values()))

    # if more than ~10% of previously-online peers yielded no data this run,
    # it is probably a local network problem — reject the whole batch
    if len(updates_with_data) < get_count_nodes_online() * 0.9:
        logger.warning("Peers update was rejected: %d - %d",
                       len(updates_with_data), len(addresses))
        return

    # set all peers unreachable; peers without an update stay unreachable
    PeerMonitor.objects.update(state=PeerMonitor.State.UNREACHABLE)

    # calculate state and apply updates
    for update in updates_with_data:
        logger.debug("Update: %r", update)
        peer_obj = PeerMonitor.objects.filter(
            announced_address=update["announced_address"]).first()
        if not peer_obj:
            logger.info("Found new peer: %s", update["announced_address"])
        update["state"] = get_state(update, peer_obj)
        form = PeerMonitorForm(update, instance=peer_obj)
        if form.is_valid():
            form.save()
        else:
            logger.info("Not valid data: %r - %r", form.errors, update)

    # every run counts toward lifetime; unreachable/stuck peers also accrue
    # downtime
    PeerMonitor.objects.update(lifetime=F("lifetime") + 1)
    PeerMonitor.objects.filter(
        state__in=[PeerMonitor.State.UNREACHABLE, PeerMonitor.State.STUCK
                   ]).update(downtime=F("downtime") + 1)

    # drop peers last seen online more than 30 days ago
    PeerMonitor.objects.annotate(duration=ExpressionWrapper(
        Now() - F("last_online_at"), output_field=DurationField())).filter(
            duration__gte=timedelta(days=30)).delete()

    # availability = percentage of lifetime spent online
    PeerMonitor.objects.update(
        availability=100 - (F("downtime") / F("lifetime") * 100),
        modified_at=timezone.now(),
    )
    logger.info("Done")
class Test(Model):
    """Fixture model exposing one field of most common types."""
    name = CharField(max_length=20)
    owner = ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True,
                       on_delete=SET_NULL)
    public = BooleanField(default=False)
    date = DateField(null=True, blank=True)
    datetime = DateTimeField(null=True, blank=True)
    permission = ForeignKey('auth.Permission', null=True, blank=True,
                            on_delete=PROTECT)
    # We can’t use the exact names `float` or `decimal` as database column name
    # since it fails on MySQL.
    a_float = FloatField(null=True, blank=True)
    a_decimal = DecimalField(null=True, blank=True, max_digits=5,
                             decimal_places=2)
    bin = BinaryField(null=True, blank=True)
    ip = GenericIPAddressField(null=True, blank=True)
    duration = DurationField(null=True, blank=True)
    uuid = UUIDField(null=True, blank=True)

    # JSONField only exists on newer Django versions; silently skip the
    # column when it is unavailable.
    try:
        from django.db.models import JSONField
        json = JSONField(null=True, blank=True)
    except ImportError:
        pass

    class Meta:
        ordering = ('name', )
def update_reports(contract, month_year):
    """
    Update the Reports for the given contract starting with the given month/year.

    Recomputes each month's worktime as the previous month's carry-over plus
    the reviewed shift time of that month, and propagates the carry-over
    forward through every subsequent report.

    :param contract: contract whose reports are recomputed
    :param month_year: first month (date) to recompute from
    :return: None
    """
    previous_report = Report.objects.filter(
        contract=contract, month_year=month_year - relativedelta(months=1))
    # Default carry-over: the contract's configured initial balance.
    carry_over_worktime = datetime.timedelta(
        minutes=contract.initial_carryover_minutes)
    if previous_report.exists():
        carry_over_worktime = (previous_report.first().worktime -
                               previous_report.first().debit_worktime)
    # Loop over all Reports starting from month in which the created/updated
    # shift took place.
    for report in Report.objects.filter(contract=contract,
                                        month_year__gte=month_year):
        # Sum of reviewed shift durations within the report's month;
        # Coalesce makes shift-free months count as zero.
        total_work_time = Shift.objects.filter(
            contract=report.contract,
            started__month=report.month_year.month,
            started__year=report.month_year.year,
            was_reviewed=True,
        ).aggregate(total_work_time=Coalesce(
            Sum(F("stopped") - F("started"), output_field=DurationField()),
            datetime.timedelta(0),
        ))["total_work_time"]
        report.worktime = carry_over_worktime + total_work_time
        report.save()
        # Next month starts from this month's remaining balance.
        carry_over_worktime = report.worktime - report.debit_worktime