def consulta_dashboard(request):
    """Render the dashboard with weekly ONT entry/exit counts and the
    percentage share of each ONT status.

    Context keys: 'entradas'/'saidas' (weekly counts), 'onts' (per-status
    percentage of the total), plus whatever menu_consultas provides.
    """
    menu = menu_consultas(request)
    # Weekly counts; TruncWeek buckets rows by the Monday of their week.
    entradas = (OntEntrada.objects
                .annotate(mes=TruncWeek('data'))
                .values('mes')
                .annotate(total=Count('id'))
                .order_by('mes'))
    saidas = (OntSaida.objects
              .annotate(mes=TruncWeek('data'))
              .values('mes')
              .annotate(total=Count('id'))
              .order_by('mes'))
    total = Ont.objects.filter(status__in=[0, 1]).aggregate(
        total=Count('id'))['total']
    # Guard against a division-by-zero in the database when no ONT is in
    # status 0/1 (the old code divided by Value(0) unconditionally).
    if total:
        onts = (Ont.objects.filter(status__in=[0, 1])
                .values('status')
                .annotate(qtd=ExpressionWrapper(
                    Count('id'), output_field=FloatField())
                    / Value(total, output_field=FloatField()) * 100.0)
                .order_by('qtd'))
    else:
        onts = Ont.objects.none()
    context = {
        'entradas': entradas,
        'saidas': saidas,
        'onts': onts,
    }
    context.update(menu)
    return render(request, "cont/v2/consulta_dashboard.html", context)
def addSem(startDate, endDate, dataIndex):
    """Append weekly check-in counts for one semester to the module-level
    ``data[dataIndex]['data']`` list.

    startDate/endDate bound the semester; weeks with no check-ins get 0.
    Mutates the global ``data`` structure in place; returns nothing.
    """
    # Collect the Monday (week start) of every week in the semester.
    week_starts = set()
    for day in daterange(startDate, endDate):
        week_starts.add(day - timedelta(days=day.weekday()))
    week_starts = sorted(week_starts)

    # Check-in count per week, keyed by week start for O(1) lookup instead
    # of the old O(weeks x rows) nested scan.
    queryset = Checkin.objects.annotate(
        weekstart=TruncWeek('date')).values('weekstart').annotate(
            count=Count('id')).order_by('weekstart')
    counts_by_week = {row['weekstart']: row['count']
                      for row in queryset.values('weekstart', 'count')}

    # Emit counts in sequential week order.
    for start in week_starts:
        data[dataIndex]['data'].append(counts_by_week.get(start, 0))

    # didn't start using check-in app till week 8 in Spring 2020
    if dataIndex == 0:
        for i in range(0, 7):
            data[0]['data'][i] = None
    # 8/23/21 is week 1 of Fall 2021 (remove weeks after that for now)
    # Change the range (1st parameter) each week to add next data point
    if dataIndex == 3:
        for _ in range(2, 17):
            data[3]['data'].pop()
def chart_weekly(self):
    """Sum of issued points per week, ordered newest week first."""
    weekly = PointIssued.objects.annotate(date=TruncWeek("created_at"))
    totals = weekly.values("date").annotate(y=Sum("point"))
    return totals.order_by("-date")
def addSem(startDate, endDate, dataIndex):
    """Append weekly check-in hours for one semester to the module-level
    ``data[dataIndex]['data']`` list.

    Durations are computed in minutes (hours*60 + minutes of timeOut-timeIn)
    and appended as hours; weeks with no check-ins get 0.
    Mutates the global ``data`` structure in place; returns nothing.
    """
    # Collect the Monday (week start) of every week in the semester.
    week_starts = set()
    for day in daterange(startDate, endDate):
        week_starts.add(day - timedelta(days=day.weekday()))
    week_starts = sorted(week_starts)

    # Total minutes per week, keyed by week start for O(1) lookup instead
    # of the old O(weeks x rows) nested scan.
    queryset = Checkin.objects.all().annotate(
        durationDiff=F('timeOut') - F('timeIn'),
        duration=(ExtractHour('durationDiff') * 60 +
                  ExtractMinute('durationDiff')),
        weekstart=TruncWeek('date')).values('weekstart').annotate(
            sumHours=Sum('duration')).order_by('weekstart')
    minutes_by_week = {row['weekstart']: row['sumHours']
                       for row in queryset.values('weekstart', 'sumHours')}

    # Emit hours in sequential week order (stored values are minutes).
    for start in week_starts:
        data[dataIndex]['data'].append(minutes_by_week.get(start, 0) / 60)

    # didn't start using check-in app till week 8 in Spring 2020
    if dataIndex == 0:
        for i in range(0, 7):
            data[0]['data'][i] = None
    # 8/23/21 is week 1 of Fall 2021 (remove weeks after that for now)
    # Increment the range (1st parameter) each week to add next data point
    if dataIndex == 3:
        for _ in range(2, 17):
            data[3]['data'].pop()
def changelist_view(self, request, extra_context=None):
    """Admin changelist enriched with daily/weekly/monthly user-creation
    metrics for the charts on the change-list page."""
    # Daily user creation metrics (chart input).
    chart_data = (
        User.objects.annotate(date=TruncDay("date_joined"))
        .values("date")
        .annotate(y=Count("id"))
        .order_by("-date")
    )
    # Monthly user creation metrics
    month_data = (
        User.objects.annotate(date=TruncMonth("date_joined"))
        .values("date")
        .annotate(y=Count("id"))
        .order_by("-date")
    )
    # Weekly user creation metrics
    weekly_data = (
        User.objects.annotate(date=TruncWeek("date_joined"))
        .values("date")
        .annotate(y=Count("id"))
        .order_by("-date")
    )
    # Convert to Json for the chart input
    as_json = json.dumps(list(chart_data), cls=DjangoJSONEncoder)
    # BUG FIX: the old `extra_context or {...}` discarded ALL metrics
    # whenever a caller passed a non-empty extra_context. Merge instead.
    extra_context = extra_context or {}
    extra_context.update({
        "chart_data": as_json,
        "weekly_data": weekly_data,
        "monthly_data": month_data,
    })
    return super().changelist_view(request, extra_context=extra_context)
def get(self, request, *args, **kwargs):
    """Return rating-history JSON for a player.

    time_period selects granularity: 1 = raw ratings from the last 52
    weeks, 5 = weekly averages over 260 weeks, 10 = monthly averages over
    520 weeks. Responds with an error payload when fewer than 3 ratings
    are available or the period is unrecognised.
    """
    member_id = self.kwargs.get("player_id")
    time_period = self.kwargs.get("time_period")
    player = Member.objects.get(pk=member_id)
    ratings = None
    min_ratings = 3
    if time_period == 1:
        # Raw ratings from the last year.
        ratings = (player.ratings_set.all().filter(
            elab_date__year__gte=self.__get_less_current_date(52).year
        ).order_by("elab_date"))
    elif time_period == 5:
        # Five years: average rating/sigma per week.
        ratings = (player.ratings_set.all().filter(
            elab_date__year__gte=self.__get_less_current_date(260).year
        ).annotate(week_date=TruncWeek("elab_date"))
            .values("week_date")
            .annotate(Avg("rating"), Avg("sigma"))
            .order_by("week_date"))
    elif time_period == 10:
        # Ten years: average rating/sigma per month.
        ratings = (player.ratings_set.all().filter(
            elab_date__year__gte=self.__get_less_current_date(520).year
        ).annotate(month_date=TruncMonth("elab_date"))
            .values("month_date")
            .annotate(Avg("rating"), Avg("sigma"))
            .order_by("month_date"))
    # PEP 8: identity comparison with None instead of `== None`.
    if ratings is None or ratings.count() < min_ratings:
        return JsonResponse({
            "status": "not enough data",
            "status_message": "Not enough data to produce a rating graph.",
        })
    return JsonResponse(self.__get_ratings_json(ratings))
def chart_regression(request):
    """Return an Altair chart (as JSON) of polynomial-regression
    predictions over cumulative weekly homicide counts, with reference
    rules for the 1993 record, 2018 and the 1991 second-highest totals."""
    h_list = Homicide.objects.annotate(
        week=TruncWeek('date')).values('week').annotate(
            ct=Count('count')).values('week', 'ct')
    df = pd.DataFrame(list(h_list))
    df['cum'] = df['ct'].cumsum()
    df['i'] = df.index + 1  # 1 is start of year
    df = df.drop(columns=['week', 'ct'])
    # Define the degree of the polynomial fit
    degree_list = [1, 2, 3]
    # Break into small steps for a smooth curve; 52 is the last week of the
    # year (the old comment said "12", a month/week mixup).
    poly_data = pd.DataFrame({'xfit': np.linspace(df['i'].min(), 52, 500)})
    for degree in degree_list:
        poly_data[str(degree)] = np.poly1d(
            np.polyfit(df['i'], df['cum'], degree))(poly_data['xfit'])
    # Tidy the dataframe so 'degree' is a variable
    poly_data = pd.melt(poly_data,
                        id_vars=['xfit'],
                        value_vars=[str(deg) for deg in degree_list],
                        var_name='degree',
                        value_name='yfit')
    # Plot the data points on an interactive axis
    points = alt.Chart(
        df, title='Polynomial Regression Predictions').mark_circle(
            color='black').encode(
                x=alt.X('i', title='weeks'),
                y=alt.Y('cum', title='cumulative murder count'))
    # Plot the best fit polynomials
    polynomial_fit = alt.Chart(poly_data).mark_line().encode(
        x='xfit',
        y='yfit',
        color=alt.Color('degree', title='Degree / Model'),
        tooltip=[
            alt.Tooltip('yfit', title='predicted homicides'),
            alt.Tooltip('xfit', title='Week')
        ])
    # Renamed from `max` so the builtin is not shadowed.
    record_df = pd.DataFrame({'xs': [1, 500], 'ys': [129, 129]})
    record = alt.Chart(record_df).mark_rule(color='green').encode(
        y='ys',
        tooltip=[alt.Tooltip('ys', title='record in 1993')],
    )
    last = pd.DataFrame({'xs': [1, 500], 'ys': [57, 57]})
    last = alt.Chart(last).mark_rule(color='teal').encode(
        y='ys', tooltip=[alt.Tooltip('ys', title='last year 2018')])
    second = pd.DataFrame({'xs': [1, 500], 'ys': [115, 115]})
    second = alt.Chart(second).mark_rule(color='gray').encode(
        y='ys', tooltip=[alt.Tooltip('ys', title='2nd highest 1991')])
    chart = points + polynomial_fit + record + last + second
    return JsonResponse(chart.to_dict(), safe=False)
def get(self, request, format=None):
    """Weekly jog report for the authenticated user, newest week first."""
    weekly = (
        Jog.objects
        .filter(owner=request.user)
        .annotate(week=TruncWeek('date'))
        .values('week')
        .annotate(
            total_jogs=Count('id'),
            total_distance=Sum('distance'),
            total_time=Sum('time'),
        )
        .order_by('-week')
    )
    serializer = JogReportSerializer(weekly, many=True)
    return Response(serializer.data, status=status.HTTP_200_OK)
def get_alert_count(case_id, group_by):
    """Alert counts for `case_id` grouped by "week", "month" or (default)
    day, plus the per-interval average across all cases of the same
    organization.

    Returns {"counts": [...], "average": float}, or None when the case
    does not exist.
    """
    try:
        organization = Case.objects.get(pk=case_id).organization.id
    except Case.DoesNotExist:
        return None
    cases = Case.objects.filter(organization=organization)
    counts = []
    sum_average = 0
    # Week and month differed only in the truncation function; dedupe the
    # two copy-pasted branches with a dispatch table.
    trunc_by_period = {"week": TruncWeek, "month": TruncMonth}
    for case in cases:
        # NOTE(review): this filters by the *requested* case_id on every
        # loop iteration, not by `case` -- confirm that is intentional.
        trunc = trunc_by_period.get(group_by)
        if trunc is not None:
            queryset = list(
                Alert.objects.filter(case=case_id).annotate(
                    date_field=trunc("created_at")).values(
                        "date_field").annotate(
                            count=Count("id")).order_by("date_field"))
            for row in queryset:
                row["date_field"] = datetime.datetime.date(
                    row["date_field"])
        else:
            # Default: group by calendar day via raw SQL date().
            queryset = list(
                Alert.objects.filter(case=case_id).extra(
                    select={
                        "date_field": "date( created_at )"
                    }).values("date_field").annotate(
                        count=Count("id")).order_by("date_field"))
        sum_case_counts = sum(item["count"] for item in queryset)
        interval = AnalyticsUtils.get_interval(case.id, group_by)
        case_average = (sum_case_counts / interval) if interval > 0 else 0
        if int(case_id) == case.id:
            counts = queryset
        sum_average += case_average
    organization_average = sum_average / len(cases)
    return {"counts": counts, "average": organization_average}
def filter_queryset(self, queryset):
    """Filter, then bucket the results into a date histogram whose
    granularity adapts to the result count and the date span:
    >5000 results -> year; >1000 results or >5 years -> quarter;
    >1 year -> month; >90 days -> week; otherwise day.
    """
    queryset = super().filter_queryset(queryset)
    num_results = queryset.count()
    if num_results == 0:
        return queryset
    early = queryset.earliest("date").date
    latest = queryset.latest("date").date
    time_diff = latest - early

    def histogram(trunc, interval_name):
        # Single aggregation shape shared by every granularity
        # (the original repeated this chain five times verbatim).
        return (queryset.annotate(date_histogram=trunc("date"))
                .values("date_histogram")
                .order_by("date_histogram")
                .annotate(
                    total=Count("date_histogram"),
                    time_interval=Value(interval_name, CharField()),
                ))

    if num_results > 5000:
        return histogram(TruncYear, "year")
    if num_results > 1000 or time_diff.days > 5 * 365:
        return histogram(TruncQuarter, "quarter")
    if time_diff.days > 365:
        return histogram(TruncMonth, "month")
    if time_diff.days > 90:
        return histogram(TruncWeek, "week")
    return histogram(TruncDay, "day")
def trunc_date_attribute(queryset, attribute_name, trunc_period='day'):
    """Add truncated attribute to queryset

    Parameters
    ----------
    queryset : queryset
        Query object with datetime attribute.
    attribute_name : str
        Name of datetime attribute in the query.
    trunc_period : str
        Possible options are: 'year', 'quarter', 'month', 'week', 'day',
        by default 'day'

    Returns
    -------
    queryset
        Annotate a new value to queryset having the name
        `<attribute_name>_truncated` with the truncated date.
    """
    truncated_attribute_name = attribute_name + '_truncated'
    # Dispatch table replaces the old if/elif ladder (which also carried a
    # dead `pass` and an `else` branch duplicating the 'day' case).
    trunc_classes = {
        'year': TruncYear,
        'quarter': TruncQuarter,
        'month': TruncMonth,
        'week': TruncWeek,
        'day': TruncDay,
    }
    # Unknown periods fall back to 'day', matching the documented default.
    trunc_class = trunc_classes.get(trunc_period, TruncDay)
    return queryset.annotate(
        **{truncated_attribute_name: trunc_class(attribute_name)})
def get_week_chart(self):
    """Build a plotly Scatter trace and Layout of request hits per week."""
    weekly_hits = (
        Request.objects
        .annotate(week=TruncWeek("timestamp"))
        .values("week")
        .annotate(hits=Count("pk")))
    # Removed the unused enumerate counter and the commented-out
    # last-Sunday filtering left over from an earlier iteration.
    x, y = [], []
    for row in weekly_hits:
        x.append(row["week"].strftime("%Y-%m-%d"))
        y.append(row["hits"])
    trace = go.Scatter(x=x, y=y, text="Hits")
    layout = go.Layout(
        title=go.layout.Title(text="Hits per week", xref="paper", x=0),
        xaxis=go.layout.XAxis(title=go.layout.xaxis.Title(text="Weeks")),
        yaxis=go.layout.YAxis(title=go.layout.yaxis.Title(text="Hits")),
    )
    return trace, layout
def reports(request):
    """Clinic report endpoint: the admin's clinic with its average rating,
    its doctors (rating defaulting to 0), and appointment counts grouped
    by month, day and week."""
    user = request.user
    if not user:
        return Response(status=status.HTTP_401_UNAUTHORIZED)

    clinic = (Clinic.objects
              .annotate(rating=Avg('ratings__rating'))
              .prefetch_related('doctors')
              .get(id=user.adminAccount.employedAt.id))

    def appointment_counts(trunc_fn, bucket):
        # Group all appointments by the given truncation of their date.
        return (Appointment.objects
                .annotate(**{bucket: trunc_fn('date')})
                .values(bucket)
                .annotate(num=Count('id'))
                .order_by())

    statsMonthly = appointment_counts(TruncMonth, 'month')
    statsDaily = appointment_counts(TruncDay, 'day')
    statsWeekly = appointment_counts(TruncWeek, 'week')

    clinicSerializer = ClinicSerializer(clinic, many=False)
    rated_doctors = clinic.doctors.annotate(
        rating=Coalesce(Avg('ratings__rating'), 0))
    doctorSerializer = DoctorSerializer(rated_doctors, many=True)

    return Response(status=status.HTTP_200_OK, data={
        'clinic': clinicSerializer.data,
        "doctors": doctorSerializer.data,
        "monthly": statsMonthly,
        "daily": statsDaily,
        "weekly": statsWeekly
    })
def visitor_chart7(request):
    """Chart JSON of new-vs-repeat visitor check-ins per week since
    2021-02-01. Returns {'labels': [week starts], 'data': two series}."""
    startDate = datetime.strptime('2021-02-01', '%Y-%m-%d').date()
    endDate = date.today()

    # helper time series function (inclusive of both endpoints)
    def daterange(date1, date2):
        for n in range(int((date2 - date1).days) + 1):
            yield date1 + timedelta(n)

    # Ordered list of all week start dates between start and today.
    week_starts = set()
    for day in daterange(startDate, endDate):
        week_starts.add(day - timedelta(days=day.weekday()))
    dates = sorted(week_starts)

    # Set up objects to return for graphing
    labels = dates
    data = [{
        'label': "New",
        'backgroundColor': "#FFA500",
        'data': [0] * len(dates)
    }, {
        'label': "Repeat",
        'backgroundColor': "#1C74AF",
        'data': [0] * len(dates)
    }]

    # Week start -> series index, for O(1) bucketing instead of the old
    # O(weeks) linear scan per check-in row.
    index_by_week = {d: i for i, d in enumerate(dates)}
    queryset = Checkin.objects.annotate(
        weekstart=TruncWeek('date')).order_by('weekstart')
    for entry in queryset.values():
        i = index_by_week.get(entry['weekstart'])
        if i is not None:
            series = 0 if entry['firstTime'] else 1
            data[series]['data'][i] += 1
    return JsonResponse(data={'labels': labels, 'data': data})
def get_total_per_week(queryset, date_field, total_expression):
    """Aggregate `total_expression` per (year, week) of `date_field`.

    Year and week are extracted from the *end* of each week rather than
    its start: for Monday Dec 30th 2019, ExtractWeek correctly yields
    week 1 but ExtractYear would yield 2019 instead of 2020, so we shift
    toward Sunday before extracting.
    """
    # TruncWeek truncates to midnight on the Monday of the week.
    weekly = queryset.annotate(monday_of_week=TruncWeek(date_field))
    # relativedelta is not supported by the ORM (it raises
    # "can't adapt type 'relativedelta'"), hence the plain timedelta.
    weekly = weekly.annotate(
        sunday_of_week=ExpressionWrapper(
            F("monday_of_week") + timedelta(days=6, hours=20),
            DateTimeField()))
    weekly = weekly.annotate(year=ExtractYear("sunday_of_week"))
    weekly = weekly.annotate(week=ExtractWeek("sunday_of_week"))
    totals = (weekly.values("year", "week")
              .annotate(total=total_expression)
              .order_by("year", "week"))
    return list(totals)
def visitor_chart4(request):
    """Chart JSON of total check-in hours per week, from the earliest
    check-in to today. Weeks with no check-ins contribute 0 hours."""
    labels = []
    data = []
    oldest = Checkin.objects.earliest('date')
    today = date.today()

    # Inclusive day-by-day iterator.
    def daterange(date1, date2):
        for n in range(int((date2 - date1).days) + 1):
            yield date1 + timedelta(n)

    week_starts = set()
    for day in daterange(getattr(oldest, 'date'), today):
        week_starts.add(day - timedelta(days=day.weekday()))
    dates = sorted(week_starts)

    # Total minutes per week, keyed by week start for O(1) lookup instead
    # of the old O(weeks x rows) nested scan.
    queryset = Checkin.objects.all().annotate(
        durationDiff=F('timeOut') - F('timeIn'),
        duration=(ExtractHour('durationDiff') * 60 +
                  ExtractMinute('durationDiff')),
        weekstart=TruncWeek('date')).values('weekstart').annotate(
            sumHours=Sum('duration')).order_by('weekstart')
    minutes_by_week = {row['weekstart']: row['sumHours']
                       for row in queryset.values('weekstart', 'sumHours')}

    for d in dates:
        labels.append(d)
        # Stored values are minutes; report hours.
        data.append(minutes_by_week.get(d, 0) / 60)
    return JsonResponse(data={'labels': labels, 'data': data})
def visitor_chart3(request):
    """Chart JSON of check-in counts per week, from the earliest check-in
    to today. Weeks with no check-ins get a count of 0."""
    labels = []
    data = []
    oldest = Checkin.objects.earliest('date')
    today = date.today()

    # Inclusive day-by-day iterator.
    def daterange(date1, date2):
        for n in range(int((date2 - date1).days) + 1):
            yield date1 + timedelta(n)

    week_starts = set()
    for day in daterange(getattr(oldest, 'date'), today):
        week_starts.add(day - timedelta(days=day.weekday()))
    dates = sorted(week_starts)

    # Count per week, keyed by week start for O(1) lookup instead of the
    # old O(weeks x rows) nested scan.
    queryset = Checkin.objects.annotate(
        weekstart=TruncWeek('date')).values('weekstart').annotate(
            count=Count('id')).order_by('weekstart')
    counts_by_week = {row['weekstart']: row['count']
                      for row in queryset.values('weekstart', 'count')}

    for d in dates:
        labels.append(d)
        data.append(counts_by_week.get(d, 0))
    return JsonResponse(data={'labels': labels, 'data': data})
def get_context_data(self, **kwargs):
    """Stats context for a search page: live aid count, search and view
    counts (30/7 days), a weekly aid-view timeseries, and top-10
    breakdowns of viewed aids, audiences and keywords."""
    context = super().get_context_data(**kwargs)
    # aid count
    context['nb_live_aids'] = self.search_page.get_base_queryset().count()
    # The timeseries actually starts on June 1st 2021; the old local was
    # misleadingly named `beginning_of_2021` with a "since 1/1/2021" note.
    timeseries_start = timezone.make_aware(datetime(2021, 6, 1))
    thirty_days_ago = timezone.now() - timedelta(days=30)
    seven_days_ago = timezone.now() - timedelta(days=7)
    # page view count: last 30 days & last 7 days
    search_events = AidSearchEvent.objects \
        .filter(source=self.search_page.slug)
    context['search_count_total'] = search_events.count()
    context['search_count_last_30_days'] = search_events \
        .filter(date_created__gte=thirty_days_ago) \
        .count()
    context['search_count_last_7_days'] = search_events \
        .filter(date_created__gte=seven_days_ago) \
        .count()
    # aid view count: last 30 days & last 7 days
    view_events = AidViewEvent.objects \
        .filter(source=self.search_page.slug)
    context['aid_view_count_last_30_days'] = view_events \
        .filter(date_created__gte=thirty_days_ago) \
        .count()
    context['aid_view_count_last_7_days'] = view_events \
        .filter(date_created__gte=seven_days_ago) \
        .count()
    # aid views grouped by week (since June 2021)
    aid_view_timeseries = view_events \
        .filter(date_created__gte=timeseries_start) \
        .annotate(date_to_week=TruncWeek('date_created')) \
        .annotate(day=Func(
            F('date_to_week'),
            Value('YYYY-MM-DD'),
            function='to_char',
            output_field=CharField())) \
        .values('day') \
        .annotate(y=Count('id')) \
        .order_by('day')
    context['aid_view_timeseries'] = list(aid_view_timeseries)
    # top 10 aid viewed
    top_aid_viewed = view_events \
        .select_related('aid') \
        .values('aid_id', 'aid__slug', 'aid__name') \
        .annotate(view_count=Count('aid_id')) \
        .order_by('-view_count')
    context['top_10_aid_viewed'] = list(top_aid_viewed)[:10]
    # top 10 targeted_audiences filters
    if self.search_page.show_audience_field:
        top_audiences_searched = search_events \
            .filter(targeted_audiences__isnull=False) \
            .annotate(audience=Func(
                F('targeted_audiences'), function='unnest')) \
            .values('audience') \
            .annotate(search_count=Count('id')) \
            .order_by('-search_count')
        # get the display_name of each audience; fall back to the raw
        # value when it is not a known audience key.
        for (index, item) in enumerate(top_audiences_searched):
            try:
                top_audiences_searched[index]['audience'] = \
                    Aid.AUDIENCES[item['audience']]  # noqa
            except KeyError:
                top_audiences_searched[index]['audience'] = \
                    item['audience']  # noqa
        context['top_10_audiences_searched'] = list(
            top_audiences_searched)[:10]  # noqa
    # top 10 keywords searched
    top_keywords_searched = search_events \
        .exclude(text__isnull=True).exclude(text__exact='') \
        .values('text') \
        .annotate(search_count=Count('id')) \
        .order_by('-search_count')
    context['top_10_keywords_searched'] = list(top_keywords_searched)[:10]
    return context
def count_by_title_weekly(self):
    """Weekly expense counts and float-cast amount totals, exposed as
    ('week', 'count', 'total', 'title') rows with no implicit ordering."""
    by_week = Expenses.objects.annotate(week=TruncWeek('created_at'))
    grouped = by_week.values('week').annotate(count=Count('title'))
    totalled = grouped.annotate(total=Sum(Cast('amount', FloatField())))
    return totalled.values('week', 'count', 'total', 'title').order_by()
def get_context_data(self, **kwargs):
    """Stats context for a search page, combining local event counts with
    Matomo analytics: view counts (all-time/30/7 days), a weekly aid-view
    timeseries, and top-10 breakdowns of aids, audiences, categories and
    search keywords."""
    context = super().get_context_data(**kwargs)
    # aid count
    context['nb_live_aids'] = self.search_page.get_base_queryset().count()
    beginning_of_2021 = timezone.make_aware(datetime(2021, 1, 1))
    thirty_days_ago = timezone.now() - timedelta(days=30)
    seven_days_ago = timezone.now() - timedelta(days=7)
    page_title = self.search_page.meta_title or self.search_page.title
    # view count: all-time
    context['view_count_total'] = get_matomo_stats_from_page_title(
        page_title=page_title,
        from_date_string=self.search_page.date_created.strftime(
            '%Y-%m-%d'),  # noqa
        result_key='nb_hits')
    # view count: last 30 days
    context['view_count_last_30_days'] = get_matomo_stats_from_page_title(
        page_title=page_title,
        from_date_string=thirty_days_ago.strftime('%Y-%m-%d'),
        result_key='nb_hits')
    # view count: last 7 days
    context['view_count_last_7_days'] = get_matomo_stats_from_page_title(
        page_title=page_title,
        from_date_string=seven_days_ago.strftime('%Y-%m-%d'),
        result_key='nb_hits')
    # aid view count: last 30 days & last 7 days
    view_events = AidViewEvent.objects \
        .filter(source=self.search_page.slug)
    context['aid_view_count_last_30_days'] = view_events \
        .filter(date_created__gte=thirty_days_ago) \
        .count()
    context['aid_view_count_last_7_days'] = view_events \
        .filter(date_created__gte=seven_days_ago) \
        .count()
    # group views by week, from 1/1/2021
    aid_view_timeseries = view_events \
        .filter(date_created__gte=beginning_of_2021) \
        .annotate(date_to_week=TruncWeek('date_created')) \
        .annotate(day=Func(
            F('date_to_week'),
            Value('YYYY-MM-DD'),
            function='to_char',
            output_field=CharField())) \
        .values('day') \
        .annotate(y=Count('id')) \
        .order_by('day')
    context['aid_view_timeseries'] = list(aid_view_timeseries)
    # top 10 aid viewed
    top_aid_viewed = view_events \
        .select_related('aid') \
        .values('aid_id', 'aid__slug', 'aid__name') \
        .annotate(view_count=Count('aid_id')) \
        .order_by('-view_count')
    context['top_10_aid_viewed'] = list(top_aid_viewed)[:10]
    # search count
    search_events = AidSearchEvent.objects \
        .filter(source=self.search_page.slug)
    context['search_events_total'] = search_events.count()
    # top 10 targeted_audiences filters
    top_audiences_searched = search_events \
        .filter(targeted_audiences__isnull=False) \
        .annotate(audience=Func(
            F('targeted_audiences'), function='unnest')) \
        .values('audience') \
        .annotate(search_count=Count('id')) \
        .order_by('-search_count')
    # get the display_name of each audience
    for (index, item) in enumerate(top_audiences_searched):
        top_audiences_searched[index]['audience'] = Aid.AUDIENCES[
            item['audience']]  # noqa
    context['top_10_audiences_searched'] = list(
        top_audiences_searched)[:10]  # noqa
    # top 10 categories filters
    top_categories_searched = search_events \
        .prefetch_related('categories') \
        .exclude(categories=None) \
        .values('categories__id', 'categories__name') \
        .annotate(search_count=Count('categories__id')) \
        .order_by('-search_count')
    context['top_10_categories_searched'] = list(
        top_categories_searched)[:10]  # noqa
    # top 10 keywords searched (via the Matomo site-search API)
    top_keywords_searched = get_matomo_stats(
        api_method='Actions.getSiteSearchKeywords',
        custom_segment=
        f'pageUrl=@{self.search_page.slug}.aides-territoires.beta.gouv.fr',  # noqa
        from_date_string=seven_days_ago.strftime('%Y-%m-%d'))
    # BUG FIX: the original tested `top_keywords_searched == list`,
    # comparing against the *type object* `list` -- always False, so the
    # top-10 keywords were never added to the context.
    if isinstance(top_keywords_searched, list):
        context['top_10_keywords_searched'] = sorted(
            top_keywords_searched,
            key=lambda k: k['nb_hits'],
            reverse=True)[:10]  # noqa
    return context
def get_stock_value(category_ids, product_ids, start_date, end_date,
                    period='year', group_by='product'):
    '''Averaged stock metrics per time bucket.

    Filters StockControl rows to the given categories/products and the
    [start_date, end_date] range, truncates `inventory_date` to `period`
    ('year', 'quarter', 'month', 'week', 'day'; anything else falls back
    to 'year'), groups by product or category reference, and annotates:
    avg_cost, avg_quantity, avg_package, avg_pallet, avg_weight,
    avg_volume and avg_dio (quantity over the average delivered quantity
    for the same date range).
    '''
    # Dispatch table replaces the old if/elif ladder (which carried a dead
    # `pass` and an `else` branch duplicating the 'year' case).
    trunc_by_period = {
        'year': TruncYear,
        'quarter': TruncQuarter,
        'month': TruncMonth,
        'week': TruncWeek,
        'day': TruncDay,
    }
    trunc = trunc_by_period.get(period, TruncYear)
    annotate_kwargs = {'inventory_date_truncated': trunc('inventory_date')}
    values_args = ['inventory_date_truncated']
    # Group data by category or product ('product' is the default).
    if group_by == 'category':
        values_args.append('stock__product__category__reference')
    else:
        values_args.append('stock__product__reference')
    queryset = StockControl.objects.filter(
        Q(inventory_date__gte=start_date) & Q(inventory_date__lte=end_date),
        stock__product__category__in=category_ids,
        stock__product__in=product_ids,
    ).annotate(**annotate_kwargs
               ).values(*values_args
                        ).annotate(
        avg_cost=Avg(ExpressionWrapper(
            F('product_quantity') * F('stock__product__cost'),
            output_field=FloatField())),
        avg_quantity=Avg(ExpressionWrapper(
            F('product_quantity'),
            output_field=FloatField())),
        avg_package=Avg(ExpressionWrapper(
            F('product_quantity') / F('stock__product__package'),
            output_field=IntegerField())),
        avg_pallet=Avg(ExpressionWrapper(
            F('product_quantity') / F('stock__product__pallet'),
            output_field=IntegerField())),
        avg_weight=Avg(ExpressionWrapper(
            F('product_quantity') * F('stock__product__weight'),
            output_field=FloatField())),
        avg_volume=Avg(ExpressionWrapper(
            F('product_quantity') * F('stock__product__volume'),
            output_field=FloatField())),
        # Days-inventory-outstanding: quantity over the average delivered
        # quantity of the same product in the same date range.
        avg_dio=Avg(ExpressionWrapper(
            F('product_quantity') / Subquery(
                DeliveryDetail.objects.filter(
                    stock__product=OuterRef('stock__product'),
                    sale__delivered_at__gte=start_date,
                    sale__delivered_at__lte=end_date,
                ).values('stock__product__reference'
                         ).annotate(
                    avg_quantity=Avg('delivered_quantity')
                ).values('avg_quantity')
            ),
            output_field=DecimalField(decimal_places=2)
        ))
    )
    return queryset
def get_event_stats(self, request):
    """
    Given a method parameter, returns event data in the specified format.
    Can return 'count' statistics or the original Event data.

    TODO: stats by device type, geographic region, time of day/week
    """
    queryset = self.filter_by_link_or_user()
    time = request.query_params.get('time', None)
    method = request.query_params.get('method', None)
    # Click counts per link for the given user, including unclicked links.
    if method is not None:
        if method.lower() == 'links':
            result = []
            queryset = queryset.values(
                'link', 'date').annotate(count=Count('link'))
            # (Removed a dead `Link.objects.all()` assignment that was
            # immediately overwritten by the filtered queryset below.)
            links_queryset = Link.objects.filter(
                creator__username=self.request.user)
            link_ids = []
            for q in queryset:
                link = LinkSerializer(Link.objects.get(pk=q['link']),
                                      many=False).data
                link_ids.append(link['id'])
                result.append({
                    'id': link['id'],
                    'title': link['text'],
                    'date': link['created'],
                    'url': link['url'],
                    'img': link['image'],
                    'media_prefix': link['media_prefix'],
                    'count': q['count'],
                })
            for link in links_queryset:
                if link.id not in link_ids:
                    result.append({
                        'id': link.id,
                        'title': link.text,
                        'date': link.created,
                        'url': link.url,
                        'img': link.image if link.image else None,
                        'media_prefix': link.media_prefix,
                        'count': 0,
                    })
            (link_csv, link_json) = self.generate_links_csv(result)
            return Response({
                'data': result,
                'raw_csv': link_csv,
                'raw': link_json,
            })
    if time is not None:
        time = time.lower()
        if time not in ['daily', 'weekly', 'monthly', 'yearly']:
            # Unknown period: plain time filtering. (The original's nested
            # `if time is None` branch here was unreachable -- `time`
            # cannot be None inside this block -- and has been removed.)
            queryset = self.filter_by_time(queryset)
            if method is not None and method.lower() == 'count':
                return Response({'count': queryset.count()})
            serializer = self.serializer_class(queryset, many=True)
            return Response(serializer.data)
    queryset = self.filter_by_time(queryset)
    # generate csv data
    (raw_data, raw_json) = self.generate_csv(queryset)
    # NOTE(review): if `time` is None at this point, .get(time) below
    # returns None and .order_by raises AttributeError -- confirm callers
    # always supply a valid `time` when reaching here.
    queryset = {
        'daily': queryset.annotate(period=TruncDate('date')),
        'weekly': queryset.annotate(period=TruncWeek('date')),
        'monthly': queryset.annotate(period=TruncMonth('date')),
        'yearly': queryset.annotate(period=TruncYear('date')),
    }.get(time).order_by('period')
    # Fetch Event entries grouped by period.
    output = {}
    queryset = queryset \
        .values('period') \
        .annotate(event_ids=ArrayAgg('id')) \
        .values('period', 'event_ids')
    for q in queryset:
        data = list(
            EventSerializer(Event.objects.get(pk=id), many=False).data
            for id in q['event_ids'])
        output[q['period']] = {
            'period': q['period'],
            'count': len(data),
            'events': data,
        }
    daily_data = self.get_daily_data(output)
    return Response({
        'data': daily_data,
        'raw_csv': raw_data,
        'raw': raw_json
    })
def get_charts_json(request):
    """Return Chart.js-ready datasets for uploaded Apple accounts.

    Considers only accounts that are in use (``used=True``) and have an
    upload date. Builds three charts: counts grouped by status display
    name, by owning user, and by upload week (database-side TruncWeek).

    Returns:
        JsonResponse with keys ``status_dic``, ``user_dic``, ``week_dic``,
        each of the shape ``{'labels': [...], 'datasets': [{'data': [...],
        'backgroundColor': [...]}]}``.
    """

    def _background_colors(count):
        # One color per data point, cycling through the palette.
        # BUG FIX: the previous fallback did `backgroundColors = chartColors`
        # and then appended to it, permanently mutating the shared
        # module-level palette on every oversized chart. Indexing modulo
        # the palette length yields the same colors with no side effect.
        return [chartColors[i % len(chartColors)] for i in range(count)]

    def _chart(labels, counts):
        # Assemble one Chart.js structure from parallel label/count lists.
        return {
            'labels': labels,
            'datasets': [{
                'data': counts,
                'backgroundColor': _background_colors(len(counts)),
            }],
        }

    def _ordered_counts(values):
        # Count occurrences preserving first-seen order (dict insertion
        # order), matching the original label ordering. Replaces the
        # previous O(n^2) label-scan loops with a single pass.
        counts = {}
        for value in values:
            counts[value] = counts.get(value, 0) + 1
        return counts

    has_upload = AppleAccountModel.objects.filter(used=True).exclude(
        upload_date__isnull=True)

    # Chart 1: accounts grouped by status display name.
    status_counts = _ordered_counts(
        item.get_status_display() for item in has_upload)
    status_dic = _chart(list(status_counts), list(status_counts.values()))

    # Chart 2: accounts grouped by owning user.
    account_counts = _ordered_counts(
        item.user.username for item in has_upload)
    account_dic = _chart(list(account_counts), list(account_counts.values()))

    # Chart 3: accounts grouped by upload week, aggregated in the database.
    # order_by() with no arguments clears any default model ordering so
    # the GROUP BY is not polluted by extra columns.
    week_data = has_upload.annotate(
        week=TruncWeek('upload_date')).values('week').annotate(
            c=Count('id')).order_by()
    week_dic = _chart(
        [str(item['week']) for item in week_data],
        [item['c'] for item in week_data],
    )

    return JsonResponse(
        {
            'status_dic': status_dic,
            'user_dic': account_dic,
            'week_dic': week_dic
        },
        safe=False)
def Dashboard(request):
    """Render the weekly report dashboard.

    Aggregates ``Report`` rows into per-week counts for each progress
    bucket (reported, taken, not taken yet, in checking, not approved,
    on progress, finished) and passes them to the template as parallel
    lists aligned with ``labels`` — one TruncWeek start per week between
    the first and last observed interval.

    Redirects anonymous users to the login page.
    """
    if not request.user.username:
        return redirect('/login/?next=%s' % request.path)
    # ambil parameter get -> (translated) could take a GET parameter here
    # to make the time interval configurable.
    reports_reported = Report.objects.annotate(
        interval=TruncWeek('date_reported')
    ).values('interval').annotate(reported=Count('id')
    ).values('interval', 'reported')
    reports_taken = Report.objects.exclude(date_last_progress=None
    ).annotate(interval=TruncWeek('date_last_progress')
    ).values('interval').annotate(taken=Count('progress')
    ).values('interval', 'taken')
    reports_not_taken_yet = Report.objects.filter(progress=None
    ).annotate(interval=TruncWeek('date_reported')
    ).values('interval').annotate(not_taken_yet=Count('id')
    ).values('interval', 'not_taken_yet')
    reports_in_checking = Report.objects.filter(progress__lte=3
    ).annotate(interval=TruncWeek('date_reported')
    ).values('interval').annotate(in_checking=Count('id')
    ).values('interval', 'in_checking')
    reports_not_approved = Report.objects.filter(progress=4
    ).annotate(interval=TruncWeek('date_reported')
    ).values('interval').annotate(not_approved=Count('id')
    ).values('interval', 'not_approved')
    reports_on_progress = Report.objects.filter(progress=5
    ).annotate(interval=TruncWeek('date_reported')
    ).values('interval').annotate(on_progress=Count('id')
    ).values('interval', 'on_progress')
    reports_finished = Report.objects.filter(progress__gte=6
    ).annotate(interval=TruncWeek('date_reported')
    ).values('interval').annotate(finished=Count('id')
    ).values('interval', 'finished')

    # The label axis spans from the earliest to the latest week seen in
    # either the reported or the taken series, in 7-day steps.
    intervals = [r['interval'] for r in reports_reported]
    intervals += [r['interval'] for r in reports_taken]
    if intervals:
        labels = [min(intervals)]
        cursor = labels[0]
        while cursor < max(intervals):
            cursor = cursor + timedelta(days=7)
            labels.append(cursor)
    else:
        # BUG FIX: previously min()/max() raised ValueError when there
        # were no reports at all; render an empty dashboard instead.
        labels = []

    def _series(queryset, key):
        # Map week-start -> count once, then read one value per label
        # (0 for weeks with no rows). Replaces seven copies of a nested
        # scan that also consulted a stale loop variable after its loop
        # ended, which misbehaved when a bucket queryset was empty.
        by_week = {row['interval']: row[key] for row in queryset}
        return [by_week.get(label, 0) for label in labels]

    data = {
        'reported': _series(reports_reported, 'reported'),
        'taken': _series(reports_taken, 'taken'),
        'not_taken_yet': _series(reports_not_taken_yet, 'not_taken_yet'),
        'in_checking': _series(reports_in_checking, 'in_checking'),
        'not_approved': _series(reports_not_approved, 'not_approved'),
        'on_progress': _series(reports_on_progress, 'on_progress'),
        'finished': _series(reports_finished, 'finished'),
    }

    signed_in_user = request.user
    context = {
        'data': data,
        'labels': labels,
        'template_test': auth_template(signed_in_user)['template_test'],
    }
    return render(request, 'report/dashboard.html', context)
def with_next_date(self):
    """Returns the next date this event occurs.

    For one-time events, this will just be the "starts" value. For
    repeating events we need to calculate the next date based on the
    repeating criteria from the time the repetition begins, i.e. at or
    after the start date.

    The annotation is done entirely in the database: ``base_date`` is the
    anchor point for the current repetition cycle, and ``next_date`` adds
    one repeat interval to it. Note that Case/When order matters — the
    first matching When wins in both expressions.

    Returns:
        QuerySet: annotated with ``base_date`` and ``next_date``.
    """
    now = timezone.now()
    return self.annotate(
        # Database-side "today at midnight" and "start of this week" so
        # the Case expressions below can reference them by name.
        start_of_day=TruncDay(Now()),
        start_of_week=TruncWeek(Now()),
        # ExtractWeekDay returns a non-integer type on some backends;
        # cast so the arithmetic in the WEEKLY branch works.
        day_of_week=Cast(ExtractWeekDay(models.F("starts")), models.IntegerField()),
        # base date : we first calculate a base date, and add the correct interval
        # to this date to get the next date.
        base_date=models.Case(
            # starts today: just use today
            # (date-part comparison against the Python-side `now`, so the
            # match is day-granular, not time-granular)
            models.When(
                models.Q(
                    starts__day=now.day,
                    starts__month=now.month,
                    starts__year=now.year,
                ),
                then=Now(),
            ),
            # repeats daily: from this morning
            models.When(
                models.Q(repeats=self.model.RepeatChoices.DAILY),
                then=models.F("start_of_day"),
            ),
            # repeats weekly: from first day of week (Sunday)
            # Note: starts with MONDAY and is indexed from 1, so need to adjust by -2.
            models.When(
                models.Q(repeats=self.model.RepeatChoices.WEEKLY),
                then=DateAdd(models.F("start_of_week"), models.F("day_of_week") - 2),
            ),
            # repeats monthly: from 1st of this month
            models.When(
                models.Q(repeats=self.model.RepeatChoices.MONTHLY),
                then=TruncMonth(Now()),
            ),
            # one-time events (and YEARLY, which has no branch here) fall
            # through to the raw start date.
            default=models.F("starts"),
            output_field=models.DateTimeField(),
        ),
        next_date=models.Case(
            # the start date: event has not begun yet, so the next
            # occurrence is simply `starts` regardless of repetition.
            models.When(
                models.Q(starts__gte=models.F("base_date"))
                & models.Q(starts__gte=Now()),
                then=models.F("starts"),
            ),
            # daily: base date + 1 day
            models.When(
                repeats=self.model.RepeatChoices.DAILY,
                then=DateAdd(models.F("base_date"), 1),
            ),
            # weekly: base date + 7 days
            models.When(
                repeats=self.model.RepeatChoices.WEEKLY,
                then=DateAdd(models.F("base_date"), 7),
            ),
            # monthly: base date + 1 month (i.e. 1st of next month)
            models.When(
                repeats=self.model.RepeatChoices.MONTHLY,
                then=MonthAdd(models.F("base_date"), 1),
            ),
            # yearly: base date + 1 year
            models.When(
                repeats=self.model.RepeatChoices.YEARLY,
                then=YearAdd(models.F("base_date"), 1),
            ),
            # non-repeating events keep their original start date.
            default=models.F("starts"),
            output_field=models.DateTimeField(),
        ),
    )