def get_data_gpu(delta, pk):
    data = InformerData.objects.filter(
        informer_id=pk,
        date__gte=timezone.now() - timedelta(hours=delta))
    msgs = data.values_list('msg', flat=True).distinct()
    data_gpu = []
    for msg in msgs:
        data_msg = data.filter(msg=msg)
        # Pick the x-axis resolution from the requested window:
        # 1 h -> hour:minute, 24 h -> hour, 30 days -> day, ~1 year -> month.
        date_dict = {
            1: data_msg.values(hour=ExtractHour('date'),
                               minute=ExtractMinute('date')),
            24: data_msg.values(hour=ExtractHour('date')),
            720: data_msg.values(day=ExtractDay('date')),
            8640: data_msg.values(month=ExtractMonth('date')),
        }
        data_list_dict = date_dict[delta].annotate(
            temperature_avg=Avg('temperature'))
        os_x = []
        os_y = []
        for row in data_list_dict:
            # Use the first group-by key as the x value and the aggregate as
            # the y value; indexing row.values() by position would pick the
            # minute instead of the average for the 1-hour window.
            keys = [v for k, v in row.items() if k != 'temperature_avg']
            os_x.append(keys[0])
            os_y.append(row['temperature_avg'])
        data_gpu.append({msg: [os_x, os_y]})
    print(data_gpu)  # debug output
    return data_gpu

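# Illustrative only (hypothetical values): for delta=24 the function above
# returns one {msg: [x_axis, y_axis]} entry per distinct message, e.g.
#
#   [{'GPU0': [[0, 1, 2], [41.5, 43.0, 40.2]]}]
#
# where x is the extracted hour and y the average temperature in that hour.
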
def test_extract_func_with_timezone(self):
    start_datetime = microsecond_support(datetime(2015, 6, 15, 23, 30, 1, 321))
    end_datetime = microsecond_support(datetime(2015, 6, 16, 13, 11, 27, 123))
    start_datetime = timezone.make_aware(start_datetime, is_dst=False)
    end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    self.create_model(start_datetime, end_datetime)
    melb = pytz.timezone('Australia/Melbourne')

    qs = DTModel.objects.annotate(
        day=Extract('start_datetime', 'day'),
        day_melb=Extract('start_datetime', 'day', tzinfo=melb),
        weekday=ExtractWeekDay('start_datetime'),
        weekday_melb=ExtractWeekDay('start_datetime', tzinfo=melb),
        hour=ExtractHour('start_datetime'),
        hour_melb=ExtractHour('start_datetime', tzinfo=melb),
    ).order_by('start_datetime')

    utc_model = qs.get()
    self.assertEqual(utc_model.day, 15)
    self.assertEqual(utc_model.day_melb, 16)
    self.assertEqual(utc_model.weekday, 2)
    self.assertEqual(utc_model.weekday_melb, 3)
    self.assertEqual(utc_model.hour, 23)
    self.assertEqual(utc_model.hour_melb, 9)

    with timezone.override(melb):
        melb_model = qs.get()

    self.assertEqual(melb_model.day, 16)
    self.assertEqual(melb_model.day_melb, 16)
    self.assertEqual(melb_model.weekday, 3)
    self.assertEqual(melb_model.weekday_melb, 3)
    self.assertEqual(melb_model.hour, 9)
    self.assertEqual(melb_model.hour_melb, 9)

def test_extract_hour_func(self):
    start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
    end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
    if settings.USE_TZ:
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    self.create_model(start_datetime, end_datetime)
    self.create_model(end_datetime, start_datetime)
    self.assertQuerysetEqual(
        DTModel.objects.annotate(
            extracted=ExtractHour('start_datetime')).order_by('start_datetime'),
        [(start_datetime, start_datetime.hour),
         (end_datetime, end_datetime.hour)],
        lambda m: (m.start_datetime, m.extracted))
    self.assertQuerysetEqual(
        DTModel.objects.annotate(
            extracted=ExtractHour('start_time')).order_by('start_datetime'),
        [(start_datetime, start_datetime.hour),
         (end_datetime, end_datetime.hour)],
        lambda m: (m.start_datetime, m.extracted))
    self.assertEqual(
        DTModel.objects.filter(
            start_datetime__hour=ExtractHour('start_datetime')).count(), 2)

def visitor_chart10(request):
    label = "Visitors per Hour"
    # Django's __week_day lookup numbers days 1 (Sunday) through 7 (Saturday).
    day_strings = {
        1: "Sunday",
        2: "Monday",
        3: "Tuesday",
        4: "Wednesday",
        5: "Thursday",
        6: "Friday",
        7: "Saturday",
    }
    data = {}
    hour_list = set()
    for day in range(1, 8):
        hour_dict = {}
        day_name = day_strings[day]  # name lookup (currently unused below)
        queryset = Checkin.objects.filter(date__week_day=day).annotate(
            startHour=ExtractHour('timeIn'),
            endHour=ExtractHour('timeOut')).values('startHour', 'endHour')
        for entry in queryset:
            # Count the visitor as present for every hour between check-in
            # and check-out (inclusive).
            for i in range(entry['startHour'], entry['endHour'] + 1):
                hour_list.add(i)
                if i in hour_dict:
                    hour_dict[i] += 1
                else:
                    hour_dict[i] = 1
        data[day] = hour_dict
    # Zero-fill so every day has an entry for every hour seen anywhere.
    for hour_dict in data.values():
        for hour in hour_list:
            if hour not in hour_dict:
                hour_dict[hour] = 0
    return JsonResponse(data={'label': label, 'data': data})

def heatmap(self, request: Request) -> Response:
    """Get the data to generate a heatmap for the volunteer.

    This includes one entry for every weekday and every hour containing
    the number of transcriptions made in that time slot. For example,
    there will be an entry for Sundays at 13:00 UTC, counting how many
    transcriptions the volunteer made in that time.

    The week days are numbered Monday=1 through Sunday=7.
    """
    utc_offset = int(request.GET.get("utc_offset", "0"))
    # Construct a timezone from the offset
    tzinfo = datetime.timezone(datetime.timedelta(seconds=utc_offset))

    heatmap = (
        self.filter_queryset(Submission.objects)
        .filter(complete_time__isnull=False)
        # Extract the day of the week and the hour the transcription was made in
        .annotate(
            day=ExtractIsoWeekDay("complete_time", tzinfo=tzinfo),
            hour=ExtractHour("complete_time", tzinfo=tzinfo),
        )
        # Group by the day and hour
        .values("day", "hour")
        # Count the transcriptions made in each time slot
        .annotate(count=Count("id"))
        # Return the values
        .values("day", "hour", "count")
        # Order by day first, then hour
        .order_by("day", "hour")
    )

    return Response(heatmap)

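# Illustrative only: with the grouping above, the serialized response is a
# list of {day, hour, count} dicts, e.g. (hypothetical numbers):
#
#   [
#       {"day": 1, "hour": 13, "count": 4},   # Mondays, 13:00-13:59
#       {"day": 7, "hour": 9, "count": 2},    # Sundays, 09:00-09:59
#   ]
#
# Slots with no transcriptions are simply absent rather than zero-filled.
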
def traffic(request):
    # 'D' = last week by weekday, 'H' = yesterday by hour, 'M' = this year by month
    chart_type = request.GET['type']
    result = None
    if chart_type == 'D':
        startDay = date.today()
        endDay = startDay + timedelta(days=-7)
        result = list(
            models.TblExecuteHistory.objects.filter(
                regdate__range=[endDay, startDay]).annotate(
                    date=ExtractWeekDay('regdate')).values('date').annotate(
                        count=Count('no'),
                        sex=F('member_mid__sex'))) + [{'type': chart_type}]
    elif chart_type == 'H':
        # __contains matches yesterday's date as a substring of the timestamp.
        result = list(
            models.TblExecuteHistory.objects.filter(
                regdate__contains=date.today() + timedelta(days=-1)).annotate(
                    date=ExtractHour('regdate')).values('date').annotate(
                        count=Count('no'),
                        sex=F('member_mid__sex'))) + [{'type': chart_type}]
    elif chart_type == 'M':
        # __contains matches the current year as a substring of the timestamp.
        result = list(
            models.TblExecuteHistory.objects.filter(
                regdate__contains=date.today().year).annotate(
                    date=ExtractMonth('regdate')).values('date').annotate(
                        count=Count('no'),
                        sex=F('member_mid__sex'))) + [{'type': chart_type}]
    return JsonResponse(result, safe=False)

def get_queryset(self):
    # "annualy" (sic) is the scope value the rest of the view layer expects.
    self.scope = get_request_or_fallback(self.request, "scope", "annualy", str, False)
    self.year = get_request_or_fallback(self.request, "year", datetime.now().year, str, False)
    self.month = get_request_or_fallback(self.request, "months", None, str, False)
    self.day = get_request_or_fallback(self.request, "day", None, str, False)
    if self.month is not None:
        self.month = int(self.month)
    if self.day is not None:
        self.day = int(self.day)
    qrySet = Timesheets.objects.filter(
        time__year=self.year
    ).values(
        "user__id", "user__user__first_name", "user__user__last_name"
    ).annotate(
        year=ExtractYear("time", output_field=IntegerField()),
        month=ExtractMonth("time", output_field=IntegerField()),
        day=ExtractDay("time", output_field=IntegerField())
    ).annotate(
        seconds=ExtractSecond("time", output_field=IntegerField()),
        minutes=ExtractMinute("time", output_field=IntegerField()),
        hours=ExtractHour("time", output_field=IntegerField())
    ).filter(getFilterIfContentAdmin(self.request)).order_by("time", "user__id")
    return TimestampDisplay(TimeStampsManager(qrySet, self.year)).getScopedView(
        self.scope, self.month, self.day)

def get_category_data_by_hour(count, section, category, lane=None,
                              direction=None, start=None, end=None):
    if not start:
        start = count.start_process_date
    if not end:
        end = count.end_process_date + timedelta(days=1)

    qs = models.CountDetail.objects.filter(
        id_lane__id_section=section,
        timestamp__range=(start, end),
        id_category=category,
    )

    if count is not None:
        qs = qs.filter(id_count=count)

    if lane is not None:
        qs = qs.filter(id_lane=lane)

    if direction is not None:
        qs = qs.filter(id_lane__direction=direction)

    qs = qs.annotate(hour=ExtractHour('timestamp')) \
        .values('hour', 'times') \
        .annotate(value=Sum('times')) \
        .values('hour', 'value') \
        .values_list('hour', 'value')

    return qs

def get_average_speed_by_hour(count, section, lane=None, direction=None,
                              start=None, end=None, v=0.15):
    # NB: the ``v`` parameter is accepted but not used here.
    if not start:
        start = count.start_process_date
    if not end:
        end = count.end_process_date + timedelta(days=1)

    qs = models.CountDetail.objects.filter(
        id_lane__id_section=section,
        timestamp__range=(start, end))

    if count is not None:
        qs = qs.filter(id_count=count)

    if lane is not None:
        qs = qs.filter(id_lane=lane)

    if direction is not None:
        qs = qs.filter(id_lane__direction=direction)

    qs = qs.annotate(hour=ExtractHour('timestamp')) \
        .order_by('hour', 'speed') \
        .values('hour', 'speed')

    df = pd.DataFrame.from_records(qs.values('hour', 'speed'))
    if not df.empty:
        df = df.set_index('hour')
        # Average the speed per hour; selecting the column explicitly avoids
        # passing 'speed' as the ``numeric_only`` argument of ``mean``.
        df = df.groupby('hour')[['speed']].mean()
    return df

def index(request):
    qtd_cadastro = Cadastro.objects.count()
    total_vendas = Recibo.objects.all().aggregate(Sum('total'))
    media_vendas = Recibo.objects.all().aggregate(Avg('total'))

    # Today's window: from 00:00:00 to 23:59:59.
    hoje = timezone.now()
    dt_inicio = hoje.replace(hour=0, minute=0, second=0, microsecond=0)
    dt_final = hoje.replace(hour=23, minute=59, second=59, microsecond=0)

    # Registrations per hour of the current day.
    cad_por_hora = Cadastro.objects.filter(data_hora__range=(dt_inicio, dt_final)) \
        .annotate(hora=ExtractHour('data_hora')) \
        .values('hora') \
        .order_by('hora') \
        .annotate(qtd=Count('id')) \
        .values('hora', 'qtd')

    # Sales count and total per day.
    vendas_por_dia = Recibo.objects.annotate(
        dia=Trunc('data_hora', 'day', output_field=DateTimeField())) \
        .values('dia') \
        .order_by('dia') \
        .annotate(qtd=Count('id')) \
        .annotate(total=Sum('total')) \
        .values('dia', 'qtd', 'total')

    return render(request, 'index.html', {
        'qtd_cadastro': qtd_cadastro,
        'total_vendas': total_vendas,
        'media_vendas': media_vendas,
        'vendas_por_dia': vendas_por_dia,
        'cad_por_hora': cad_por_hora,
    })

def addSem(startDate, endDate, dataIndex):
    # create set of all week-start dates between semester start and end
    dates = set()
    for week in daterange(startDate, endDate):
        start = week - timedelta(days=week.weekday())
        dates.add(start)
    dates = sorted(list(dates))

    # get total minutes per week in queryData (unordered set)
    queryset = Checkin.objects.all().annotate(
        durationDiff=F('timeOut') - F('timeIn'),
        duration=(ExtractHour('durationDiff') * 60 +
                  ExtractMinute('durationDiff')),
        weekstart=TruncWeek('date')).values('weekstart').annotate(
            sumHours=Sum('duration')).order_by('weekstart')
    queryData = queryset.values('weekstart', 'sumHours')

    # put hours per week (in sequential order) into the module-level data array
    finalSet = []
    for d in dates:
        hours = 0
        for val in queryData:
            if val['weekstart'] == d:
                hours = val['sumHours']
        finalSet.append({'weekstart': d, 'sumHours': hours})
    for x in finalSet:
        data[dataIndex]['data'].append(x['sumHours'] / 60)

    # didn't start using check-in app till week 8 in Spring 2020
    if dataIndex == 0:
        for x in range(0, 7):
            data[0]['data'][x] = None

    # 8/23/21 is week 1 of Fall 2021 (remove weeks after that for now)
    # Increment the range (1st parameter) each week to add next data point
    if dataIndex == 3:
        for x in range(2, 17):
            data[3]['data'].pop()

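# A minimal plain-Python sketch (hypothetical values) of the duration
# arithmetic used in addSem above: ExtractHour(diff) * 60 +
# ExtractMinute(diff) recombines the hour and minute components of a
# time difference into total minutes.
from datetime import datetime

time_in = datetime(2021, 8, 23, 9, 15)    # hypothetical check-in
time_out = datetime(2021, 8, 23, 11, 40)  # hypothetical check-out
diff = time_out - time_in
hours, remainder = divmod(diff.seconds, 3600)
minutes = remainder // 60
duration_minutes = hours * 60 + minutes
assert duration_minutes == diff.seconds // 60 == 145
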
def computeHoursDaily(self, employee):
    date = self.getDate()
    querySet = Timesheets.objects.filter(
        time__year=date.year
    ).values(
        "user__id", "user__user__first_name", "user__user__last_name"
    ).annotate(
        year=ExtractYear("time", output_field=IntegerField()),
        month=ExtractMonth("time", output_field=IntegerField()),
        day=ExtractDay("time", output_field=IntegerField())
    ).annotate(
        seconds=ExtractSecond("time", output_field=IntegerField()),
        minutes=ExtractMinute("time", output_field=IntegerField()),
        hours=ExtractHour("time", output_field=IntegerField())
    ).filter(getFilterIfContentAdmin(self.request)).order_by("time", "user__id")

    data = TimestampDisplay(TimeStampsManager(querySet, date.year)).getDailyView(
        date.day, date.month, employee.id)
    if data == {}:
        return (0, 0)
    return data[employee.id]

def get_time_data_yearly(year, section, lane=None, direction=None):
    """Vehicles by hour and day of the week"""
    start = datetime(year, 1, 1)
    end = datetime(year + 1, 1, 1)

    # By lane/direction grouped per hour
    qs = models.CountDetail.objects.filter(
        id_lane__id_section=section,
        timestamp__range=(start, end))

    if lane is not None:
        qs = qs.filter(id_lane=lane)

    if direction is not None:
        qs = qs.filter(id_lane__direction=direction)

    # Vehicles by day and hour
    qs = qs.annotate(date=Trunc('timestamp', 'day'),
                     hour=ExtractHour('timestamp')) \
        .values('date', 'hour', 'times') \
        .order_by('date', 'hour') \
        .annotate(thm=Sum('times')) \
        .values('import_status', 'date', 'hour', 'thm')

    df = pd.DataFrame.from_records(qs)
    # dt.dayofweek numbers Monday=0 through Sunday=6.
    df = df.groupby([df['date'].dt.dayofweek, 'hour']).thm.sum()
    df = df.reset_index()
    return df

def by_hour(hours):
    # Non-source samples, judged by expected arrival time.
    qs_ns = Sample.objects.filter(valid=True).filter(
        is_source=False,
        trip__date__gte=datetime.date(2017, 5, 1)).annotate(
            h=ExtractHour('exp_arrival')).filter(h__in=hours)
    qs_ns_late = qs_ns.filter(delay_arrival__gte=300)

    # Source samples, judged by expected departure time.
    qs_s = Sample.objects.filter(valid=True).filter(
        is_source=True,
        trip__date__gte=datetime.date(2017, 5, 1)).annotate(
            h=ExtractHour('exp_departure')).filter(h__in=hours)
    qs_s_late = qs_s.filter(delay_departure__gte=300)

    print('hours = %s' % (hours,))
    print('non source = %d / %d => %.2f' %
          (qs_ns_late.count(), qs_ns.count(),
           100 * qs_ns_late.count() / qs_ns.count()))
    print('source = %d / %d => %.2f' %
          (qs_s_late.count(), qs_s.count(),
           100 * qs_s_late.count() / qs_s.count()))

def retrieve(self, request, pk=None, date=None):
    batterylog = models.Batterylog.objects.filter(morcha=pk)
    batterylog = batterylog.annotate(
        hour=ExtractHour('detected_datetime'),
        minute=ExtractMinute('detected_datetime')).values(
            'hour', 'minute', 'battery_level')
    # Debug output: the generated SQL, the model, and the using() method.
    print(batterylog.query)
    print(batterylog.model)
    print(batterylog.using)
    return Response({'bl': batterylog})

def chart(request):
    citywise_customers = customer.objects.all().annotate(cityc=Count('city'))
    city1_customers = citywise_customers.filter(city__city='ludhiana')
    city2_customers = citywise_customers.filter(city__city='chandigarh')

    city1item1 = customer.objects.filter(city__city='ludhiana', item__item='school bag')
    city1item2 = customer.objects.filter(city__city='ludhiana', item__item='hand bag')
    city1item3 = customer.objects.filter(city__city='ludhiana', item__item='travelling bag')
    city2item1 = customer.objects.filter(city__city='chandigarh', item__item='school bag')
    city2item2 = customer.objects.filter(city__city='chandigarh', item__item='hand bag')
    city2item3 = customer.objects.filter(city__city='chandigarh', item__item='travelling bag')

    labels = [str(x) + ':00 hours' for x in range(1, 13)]
    itemlabels = ['school bag', 'hand bag', 'travelling bag']

    hourwise_customers1 = city1_customers.annotate(
        hour=ExtractHour('timestamp')).values('hour').annotate(
            c=Count('id')).values('hour', 'c')
    hourwise_customers2 = city2_customers.annotate(
        hour=ExtractHour('timestamp')).values('hour').annotate(
            c=Count('id')).values('hour', 'c')
    hourwisedata1 = [x['c'] for x in hourwise_customers1]
    hourwisedata2 = [x['c'] for x in hourwise_customers2]

    context = {
        'hourwise_customers1': hourwisedata1,
        'hourwise_customers2': hourwisedata2,
        'labels': labels,
        'city1item1': city1item1.count(),
        'city1item2': city1item2.count(),
        'city1item3': city1item3.count(),
        'itemlables': itemlabels,  # NB: the template expects this misspelled key
        'city2item1': city2item1.count(),
        'city2item2': city2item2.count(),
        'city2item3': city2item3.count(),
    }
    return render(request, 'chartjs.html', context=context)

def get_last_update():
    data_banco = Embrapa_Last_Updated.objects.annotate(
        day=ExtractDay('last_updated'),
        month=ExtractMonth('last_updated'),
        year=ExtractYear('last_updated'),
        hour=ExtractHour('last_updated'),
        minute=ExtractMinute('last_updated'),
        second=ExtractSecond('last_updated'),
    ).values('day', 'month', 'year', 'hour', 'minute', 'second').get()
    print(data_banco)

    # Approximate conversion to seconds (30-day months, 360-day years); both
    # values below use the same weights, so their difference is comparable.
    result_in_seconds_from_database = (
        data_banco["day"] * 86400 +
        data_banco["month"] * 2592000 +
        data_banco["year"] * 31104000 +
        data_banco["hour"] * 3600 +
        data_banco["minute"] * 60 +
        data_banco["second"])
    print("resultado do banco em segundos:")  # database result in seconds
    print(result_in_seconds_from_database)

    # Same weighting applied to the current date and time; reading the
    # datetime attributes directly replaces string slicing of str(datetime).
    date_current = datetime.now()
    result_all_in_seconds_current = (
        date_current.day * 86400 +
        date_current.month * 2592000 +
        date_current.year * 31104000 +
        date_current.hour * 3600 +
        date_current.minute * 60 +
        date_current.second)
    print(result_all_in_seconds_current)

    return [result_all_in_seconds_current, result_in_seconds_from_database]

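# An alternative sketch (not part of the function above): the elapsed time
# since the last update can be computed exactly with datetime arithmetic,
# avoiding the approximate 30-day-month / 360-day-year weights:
from datetime import datetime

def seconds_since(last_updated: datetime) -> float:
    """Exact elapsed seconds between a stored naive datetime and now."""
    return (datetime.now() - last_updated).total_seconds()
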
def get_total_today(taradod):
    # Entries seen today (matched on day of month), counted per hour.
    qn = taradod.filter(
        seen__day=datetime.datetime.now().astimezone().day
    ).annotate(
        hour=ExtractHour('seen'),
    ).values(
        'hour'
    ).annotate(
        n=Count('pk')
    ).order_by('hour')
    # Counter returns 0 for missing hours, giving a dense 24-slot list.
    data = Counter({d['hour']: d['n'] for d in qn})
    result = [data[i] for i in range(24)]
    return result

def GoogleChartData(request):
    start_day = request.GET.get('start-day')
    end_day = request.GET.get('end-day')
    dt_start_day = datetime.datetime.strptime(start_day, '%Y-%m-%d')
    dt_end_day = datetime.datetime.strptime(
        end_day, '%Y-%m-%d') + datetime.timedelta(days=1)

    datapoint = request.GET.get('datapoint')
    building_pk, building, model, endpoint = datapoint.split('-')

    # Query the data according to the model
    if model == 'Ismart':
        queryset = Ismart.objects.filter(
            building__pk=building_pk,
            datetime__range=(dt_start_day, dt_end_day)).order_by('datetime')
    elif model == 'Weather':
        station_id = Building.objects.get(
            pk=building_pk).weather_station.station_id
        queryset = Weather.objects.filter(
            weather_station__station_id=station_id,
            datetime__range=(dt_start_day, dt_end_day)).order_by('datetime')

    # Build a Google Charts "Date(y, m, d, h, min, s)" literal; JavaScript
    # Date months are zero-based, hence ExtractMonth('datetime') - 1.
    queryset = queryset.annotate(timestamp=Concat(
        Value('Date('),
        ExtractYear('datetime'), Value(', '),
        ExtractMonth('datetime') - 1, Value(', '),
        ExtractDay('datetime'), Value(', '),
        ExtractHour('datetime'), Value(', '),
        ExtractMinute('datetime'), Value(', '),
        ExtractSecond('datetime'), Value(')'),
        output_field=CharField()))
    data = queryset.values_list('timestamp', endpoint)

    result = {
        'prop': {
            'id': datapoint,
            'label': building + '-' + model + '-' + endpoint,
            'type': 'number'
        },
        'data': dict((x[0], x[1]) for x in data)
    }
    return HttpResponse(json.dumps(result, ensure_ascii=False),
                        content_type="application/json; charset=utf-8")

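# A plain-Python sketch (hypothetical datetime) of the string the Concat
# annotation above produces; note the zero-based month:
from datetime import datetime

dt = datetime(2021, 3, 14, 15, 9, 26)  # hypothetical reading
timestamp = 'Date({}, {}, {}, {}, {}, {})'.format(
    dt.year, dt.month - 1, dt.day, dt.hour, dt.minute, dt.second)
assert timestamp == 'Date(2021, 2, 14, 15, 9, 26)'
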
def get_time_data(count, section, lane=None, direction=None, start=None,
                  end=None, exclude_trash=False):
    if not start:
        start = count.start_process_date
    if not end:
        end = count.end_process_date + timedelta(days=1)

    # By lane/direction grouped per hour
    qs = models.CountDetail.objects.filter(
        id_count=count,
        id_lane__id_section=section,
        timestamp__range=(start, end))

    if lane is not None:
        qs = qs.filter(id_lane=lane)

    if direction is not None:
        qs = qs.filter(id_lane__direction=direction)

    if exclude_trash:
        qs = qs.exclude(id_category__trash=True)

    # Vehicles by day and hour
    qs = qs.annotate(date=Trunc('timestamp', 'day'),
                     hour=ExtractHour('timestamp')) \
        .values('date', 'hour', 'times') \
        .order_by('-date', 'hour') \
        .annotate(thm=Sum('times')) \
        .values('import_status', 'date', 'hour', 'thm')

    df = pd.DataFrame.from_records(qs)
    if not df.empty:
        df['date'] = df['date'].dt.strftime('%a %d.%m.%Y')
        df['import_status'].replace({0: 'Existant', 1: 'Nouveau'}, inplace=True)
    return df

def values_by_day_and_hour(self):
    # Get all the count details for section and the year
    qs = CountDetail.objects.filter(
        id_lane__id_section__id=self.section_id,
        timestamp__year=self.year,
        import_status=definitions.IMPORT_STATUS_DEFINITIVE,
    )

    # TODO: don't divide by 51 but actually aggregate first by the
    # real days (with sum) and then aggregate by weekday (with average)

    # Total by day of the week (1->Monday, 7->Sunday) and by hour (0->23)
    result = qs.annotate(weekday=ExtractIsoWeekDay('timestamp')) \
        .annotate(hour=ExtractHour('timestamp')) \
        .values('weekday', 'hour') \
        .annotate(tjm=Sum('times') / 51) \
        .values('weekday', 'hour', 'tjm')
    return result

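# A sketch of the aggregation the TODO above describes (hedged; the function
# name is hypothetical and ``qs`` is assumed to be the same CountDetail
# queryset): sum per real calendar day first, then average those daily
# totals per weekday/hour instead of dividing by 51.
from collections import defaultdict

from django.db.models import Sum
from django.db.models.functions import ExtractHour, TruncDate

def values_by_day_and_hour_exact(qs):
    """Average of real daily totals per (ISO weekday, hour)."""
    daily = qs.annotate(day=TruncDate('timestamp'),
                        hour=ExtractHour('timestamp')) \
        .values('day', 'hour') \
        .annotate(total=Sum('times'))
    buckets = defaultdict(list)
    for row in daily:
        # date.isoweekday() matches ExtractIsoWeekDay: 1=Monday .. 7=Sunday
        buckets[(row['day'].isoweekday(), row['hour'])].append(row['total'])
    return {key: sum(vals) / len(vals) for key, vals in buckets.items()}
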
def get_events_per_hour(self) -> list[dict[str, int]]:
    """Get event count by hour in the last day, fill with zeros"""
    date_from = now() - timedelta(days=1)
    result = (
        self.filter(created__gte=date_from)
        # Age of each event relative to now, as a duration
        .annotate(age=ExpressionWrapper(
            now() - F("created"), output_field=DurationField()))
        # Whole hours of that age
        .annotate(age_hours=ExtractHour("age"))
        .values("age_hours")
        .annotate(count=Count("pk"))
        .order_by("age_hours")
    )
    # Counter returns 0 for hours without events
    data = Counter({int(d["age_hours"]): d["count"] for d in result})
    results = []
    _now = now()
    for hour in range(0, -24, -1):
        results.append({
            "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
            "y_cord": data[hour * -1],
        })
    return results

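# A self-contained sketch (hypothetical data) of the Counter zero-fill
# pattern used above: Counter returns 0 for missing keys, so sparse
# per-hour buckets can be read densely without KeyError.
from collections import Counter

sparse = Counter({0: 12, 3: 5, 23: 1})  # hour offset -> event count
dense = [sparse[h] for h in range(24)]  # missing hours become 0
assert dense[1] == 0 and dense[3] == 5
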
def get_users_by_time(self, from_date, to_date, user):
    actions_filters = dict(timestamp__gt=from_date,
                           timestamp__lt=to_date,
                           verb='logged in')
    if not user.is_superuser:
        if not hasattr(user, 'organization'):
            raise SuspiciousOperation
        actions_filters['target_object_id'] = user.organization.id

    dataset = Action.objects.filter(**actions_filters).annotate(
        weekday=ExtractWeekDay('timestamp'),
        hour=ExtractHour('timestamp')).values('weekday', 'hour').annotate(
            count=Count('hour')).values('weekday', 'hour',
                                        'count').order_by('weekday')

    cluster_map = defaultdict(list)
    for data_item in dataset:
        cluster_map[data_item['weekday']].append(data_item)

    def dataset_factory(week_day, daydata_dict):
        if not daydata_dict:
            return dict()
        # ExtractWeekDay numbers days 1 (Sunday) through 7 (Saturday).
        daydata_map = [
            'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday',
            'Friday', 'Saturday'
        ]
        label = daydata_dict[0]['weekday']
        label = daydata_map[label - 1]
        date_count_tuple = [(daydata['hour'], daydata['count'])
                            for daydata in daydata_dict]
        date_count_tuple = self.fillin_missing_hour_data(date_count_tuple)
        return dict(
            label=label,
            data=[date_count[1] for date_count in date_count_tuple],
            fill=False,
            borderColor=get_random_color()
            # lineTension=0.1
        )

    # dict.iteritems() is Python 2 only; items() works everywhere.
    return [dataset_factory(*item) for item in cluster_map.items()]

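# ``fillin_missing_hour_data`` is defined elsewhere on the class; a
# hypothetical sketch of what it plausibly does, padding absent hours with
# zero counts so every dataset has 24 points:
def fillin_missing_hour_data(date_count_tuple):
    counts = dict(date_count_tuple)
    return [(hour, counts.get(hour, 0)) for hour in range(24)]
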
def date_accept_activity(self, request):
    report_type = self.request.query_params.get("report_type", None)
    results = []
    # NB: 'month' passes the guard but has no branch below, so it returns [].
    if report_type not in ['hour', 'week', 'month', 'day']:
        return Response(status=status.HTTP_400_BAD_REQUEST)

    queryset = self.set_activity_report_queryset(
        self.request.query_params,
        filter_start_date='accepted_at__gte',
        filter_end_date='accepted_at__lte'
    )
    queryset = queryset.filter(~Q(accepted_at=None))

    if report_type == "hour":
        results = queryset.annotate(
            axis_x=ExtractHour('accepted_at')
        ).values('axis_x').annotate(count=Count('id')).order_by("axis_x")
        results = list(results)
    elif report_type == "week":
        results = queryset.annotate(
            axis_x=ExtractWeekDay('accepted_at')
        ).values('axis_x').annotate(count=Count('id')).order_by("axis_x")
        week_days = WEEK_DAYS.copy()
        # map data for result
        for report_item in results:
            day_value = (report_item['axis_x'] % 7) + 1
            report_item['sort_key'] = day_value
            report_item['axis_x'] = week_days[str(day_value)]
        results = sorted(results, key=lambda k: k['sort_key'])
    elif report_type == "day":
        results = list(queryset.annotate(
            axis_x=TruncDay('accepted_at')
        ).values('axis_x').annotate(count=Count('id')).order_by("axis_x"))
        for report_item in results:
            report_item['axis_x'] = gregorian_to_persian_chart(report_item['axis_x'])

    return Response(results, status=status.HTTP_200_OK)

def get_context_data(self, **kwargs):
    context = super(EventMonthView, self).get_context_data(**kwargs)
    qs = self.request.META['QUERY_STRING']
    year, month, error = self.get_year_and_month(self.net, qs)

    # add a dict containing the year, month, and month name to the context
    current = dict(year=year, month_num=month, month=MONTHS_ALT[month][:3])
    context['current'] = current

    display_month = MONTHS_ALT[month]
    if isinstance(display_month, six.binary_type):
        display_month = display_month.decode('utf-8')
    context['month_and_year'] = u"%(month)s, %(year)d" % (
        {'month': display_month, 'year': year}
    )

    if error:  # send any year/month errors
        context['cal_error'] = error

    all_month_events = list(
        self.get_month_events(
            year, month, self.category, self.tag, loc=True, cncl=True
        ).annotate(
            start_hour=ExtractHour('start_date')
        ).order_by('start_hour')
    )
    context['raw_all_month_events'] = all_month_events

    context['show_events'] = False
    if getattr(settings, "CALENDAR_SHOW_LIST", False):
        context['show_events'] = True
        context['events'] = c.order_events(all_month_events, d=True) \
            if self.request.is_ajax() else c.order_events(all_month_events)
    return context

def values_by_hour_and_direction(self, direction,
                                 weekdays=(1, 2, 3, 4, 5, 6, 7)):
    # Get all the count details for section and the year;
    # the __iso_week_day lookup runs 1 (Monday) through 7 (Sunday).
    qs = CountDetail.objects.filter(
        id_lane__id_section__id=self.section_id,
        timestamp__year=self.year,
        id_lane__direction=direction,
        timestamp__iso_week_day__in=weekdays,
        import_status=definitions.IMPORT_STATUS_DEFINITIVE,
    )

    # TODO: don't divide by 365

    # Total by hour (0->23)
    result = qs.annotate(hour=ExtractHour('timestamp')) \
        .values('hour') \
        .annotate(tjm=Sum('times') / 365) \
        .values('hour', 'tjm')
    return result

def get_queryset(self):
    queryset = self.get_curated_events(user=self.request.user).all()
    country_code = self.request.query_params.get('country_code', None)
    pax = self.request.query_params.get('pax', None)
    event_type = self.request.query_params.get('event_type', None)
    duration = self.request.query_params.get('duration', None)
    budget = self.request.query_params.get('budget', None)
    is_past_event = self.request.query_params.get('is_past_event', None)

    weight_func = 0
    is_search = False

    if is_past_event is not None:
        if is_past_event.isdigit() and bool(int(is_past_event)):
            queryset = queryset.filter(is_past_event=True)
        elif is_past_event.isdigit() and not bool(int(is_past_event)):
            queryset = queryset.filter(is_past_event=False)
    if country_code is not None:
        queryset = queryset.filter(country__code=country_code)
    if event_type is not None and event_type != '*':
        queryset = queryset.filter(type=event_type)
    if pax is not None and pax != '*':
        weight_func += F('pax') - int(pax)
        is_search = True
    if duration is not None and duration != '*':
        # Hour component of the duration; ExtractHour's lookup_name is fixed,
        # so the positional 'epoch' argument has no effect.
        weight_func += ExtractHour(F('duration'), 'epoch') - int(duration)
        is_search = True
    if budget is not None and budget != '*':
        # Scale down the effect of price to match up with pax.
        weight_func += (F('price') - int(budget)) / 100
        is_search = True

    if is_search:
        # Rank by absolute distance from the requested pax/duration/budget.
        queryset = queryset.annotate(weight=Func(ExpressionWrapper(
            weight_func, output_field=IntegerField()), function='ABS'))
        queryset = queryset.order_by('weight')
    else:
        queryset = queryset.order_by('-price')
    return queryset

def visitor_chart4(request):
    labels = []
    data = []
    oldestWeek = Checkin.objects.earliest('date')
    currWeek = date.today()

    def daterange(date1, date2):
        for n in range(int((date2 - date1).days) + 1):
            yield date1 + timedelta(n)

    # Collect the Monday of every week between the oldest check-in and today.
    dates = set()
    for week in daterange(getattr(oldestWeek, 'date'), currWeek):
        start = week - timedelta(days=week.weekday())
        dates.add(start)
    dates = sorted(list(dates))

    # Total minutes checked in per week (hour/minute components recombined).
    queryset = Checkin.objects.all().annotate(
        durationDiff=F('timeOut') - F('timeIn'),
        duration=(ExtractHour('durationDiff') * 60 +
                  ExtractMinute('durationDiff')),
        weekstart=TruncWeek('date')).values('weekstart').annotate(
            sumHours=Sum('duration')).order_by('weekstart')
    queryData = queryset.values('weekstart', 'sumHours')

    # Zero-fill weeks without check-ins.
    finalSet = []
    for d in dates:
        hours = 0
        for val in queryData:
            if val['weekstart'] == d:
                hours = val['sumHours']
        finalSet.append({'weekstart': d, 'sumHours': hours})

    for entry in finalSet:
        labels.append(entry['weekstart'])
        data.append(entry['sumHours'] / 60)
    return JsonResponse(data={'labels': labels, 'data': data})

def stats_order_heatmap(request):
    try:
        from_year = int(request.GET.get('from_year', 2022))  # TODO: improve filtering
    except ValueError:
        raise Http404("Året finns inte")

    orders = models.Order.objects.filter(
        accepted=True, put_at__year__gte=from_year).annotate(
            weekday=ExtractIsoWeekDay('put_at'),
            hour=ExtractHour('put_at'),
            minute=ExtractMinute('put_at'),
        ).annotate(quarter=Case(
            When(minute__gte=0, minute__lt=15, then=Value(0)),
            When(minute__gte=15, minute__lt=30, then=Value(15)),
            When(minute__gte=30, minute__lt=45, then=Value(30)),
            When(minute__gte=45, then=Value(45)),
            output_field=IntegerField(),
        )).filter(
            # Weekdays only (ISO 1-5), business hours 08-16.
            weekday__lt=6, hour__gte=8, hour__lte=16
        ).values("weekday", "hour", "quarter").annotate(
            count=Count("id")).order_by("weekday", "hour", "quarter")

    data = DataFrame()
    weekdays = ["Måndag", "Tisdag", "Onsdag", "Torsdag", "Fredag"]
    for timepoint in orders:
        data.at[weekdays[timepoint["weekday"] - 1],
                time(timepoint["hour"], timepoint["quarter"])] = int(
                    timepoint["count"])

    sns.set_theme()
    plt.figure(figsize=(16, 9))
    plot = sns.heatmap(data, cbar=False, cmap="YlGnBu")
    plot.figure.autofmt_xdate()

    buffer = BytesIO()
    plot.get_figure().savefig(buffer, format='png')
    buffer.seek(0)
    return FileResponse(buffer, filename='heatmap.png')

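# The Case/When ladder above floors each minute to the start of its quarter
# hour; the same bucketing in one line of arithmetic:
def quarter_start(minute: int) -> int:
    return (minute // 15) * 15

assert [quarter_start(m) for m in (0, 7, 15, 29, 44, 59)] == [0, 0, 15, 15, 30, 45]
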
def datos_horarios_json(est_id, var_id, fec_ini, fec_fin):
    consulta = (Medicion.objects.filter(est_id=est_id)
                .filter(var_id=var_id)
                .filter(med_fecha__range=[fec_ini, fec_fin]))
    consulta = consulta.annotate(year=ExtractYear('med_fecha'),
                                 month=ExtractMonth('med_fecha'),
                                 day=ExtractDay('med_fecha'),
                                 hour=ExtractHour('med_fecha')
                                 ).values('year', 'month', 'day', 'hour')
    # Variable 1 is accumulated per hour (Sum); the rest are averaged.
    if var_id == 1:
        consulta = list(consulta.annotate(valor=Sum('med_valor'))
                        .values('valor', 'year', 'month', 'day', 'hour')
                        .order_by('year', 'month', 'day', 'hour'))
    else:
        consulta = list(consulta.annotate(valor=Avg('med_valor'),
                                          maximo=Max('med_maximo'),
                                          minimo=Min('med_minimo'))
                        .values('valor', 'maximo', 'minimo',
                                'year', 'month', 'day', 'hour')
                        .order_by('year', 'month', 'day', 'hour'))
    datos = []
    if len(consulta) > 0:
        for fila in consulta:
            # Rebuild the timestamp directly from the extracted parts instead
            # of round-tripping through a formatted string.
            fecha_hora = datetime(fila.get('year'), fila.get('month'),
                                  fila.get('day'), fila.get('hour'))
            dato = {
                'fecha': fecha_hora,
                'valor': fila.get('valor'),
                'maximo': fila.get('maximo'),
                'minimo': fila.get('minimo'),
            }
            datos.append(dato)
    else:
        datos = {'mensaje': 'no hay datos'}
    return datos