def get_queryset(self):
    """Return per-week totals for the current user's entries from last year.

    Each row carries the ISO week number plus summed distance/duration,
    an entry count, and a derived weekly average speed.
    """
    year = date.today().year - 1
    # BUG FIX: `Q(...) and Q(...)` evaluates to only the second Q object
    # (Python `and` returns an operand, it does not combine querysets),
    # which silently dropped the author filter. Q objects combine with `&`.
    queryset = Entry.objects.filter(
        Q(author=self.request.user) & Q(date__year=year))
    statistics = queryset.annotate(
        week=ExtractWeek('date')).values('week').annotate(
            total_distance=Sum('distance'),
            total_duration=Sum('duration'),
            entries_count=Count('id')).annotate(
                weekly_average_speed=Sum('distance') / Sum('duration')
            ).order_by(ExtractWeek('date'))
    return statistics
def get(self, request, category):
    """Render a menu of events filtered by `category` relative to today.

    Categories: 'current_week', 'future_events', anything else = past.
    """
    annotated = Event.objects.annotate(
        year=ExtractYear('start_datetime')).annotate(
            start_week=ExtractWeek('start_datetime'))
    now = datetime.datetime.today()
    this_year = now.isocalendar()[0]
    this_week = now.isocalendar()[1]
    if category == 'current_week':
        events = [e for e in annotated
                  if e.year == this_year and e.start_week == this_week]
    elif category == 'future_events':
        events = [e for e in annotated
                  if (e.year == this_year and e.start_week > this_week)
                  or e.year > this_year]
    else:
        events = [e for e in annotated
                  if (e.year == this_year and e.start_week < this_week)
                  or e.year < this_year]
    menu_items = [
        MenuItem(description=truncatechars(event.description, 30),
                 method='GET',
                 path=event.get_absolute_url())
        for event in events
    ]
    return self.to_response(Menu(body=menu_items, header='menu'))
def report(request, report_type):
    """Aggregate finished orders by day/week/month/year and render the report.

    An unknown `report_type` falls through with the unaggregated queryset,
    matching the original behaviour. (The four nearly-identical branches
    and the no-op `report_type = report_type` assignment were consolidated.)
    """
    orders = Order.objects.filter(finish_flag=True)
    if report_type in ('month', 'day', 'year', 'week'):
        # `report_type` doubles as the Trunc granularity keyword.
        orders = orders.annotate(
            time=Trunc('date', report_type)).values('time').annotate(
                count=Count('id'), total_income=Sum('total_price'))
        if report_type == 'week':
            # The weekly report additionally exposes the ISO week number.
            orders = orders.annotate(week=ExtractWeek('time'))
    return render(request, 'report/report.html', context={
        'orders': orders,
        'type': report_type,
    })
def userpage(request):
    """Render the user dashboard: collection stats plus gameplay trends.

    Builds most-common category/mechanics/designer counts over the owned
    games, then week/month/year gameplay deltas from the played games.
    """
    userid = my_view(request)
    bg_owned_list = Player.objects.get(id=userid).get_owned(userid)
    data = {
        'weight': [],
        'mechanics': [],
        'category': [],
        'rank': [],
        'designer': [],
    }
    # Only the first 52 owned games are inspected, bounding external lookups.
    for bg_id in bg_owned_list[:52]:
        bgg_info = get_bg_cmd(bg_id)
        data['mechanics'].extend(bgg_info['mechanics'])
        data['category'].extend(bgg_info['category'])
        data['designer'].extend(bgg_info['designer'])
    context = {
        'NoG': len(bg_owned_list),
        'MoC': Counter(data['category']).most_common(3),
        'MoM': Counter(data['mechanics']).most_common(3),
        'MoD': Counter(data['designer']).most_common(3),
    }
    games_list = Player.objects.get(id=userid).get_played(userid)
    games_list = games_list.annotate(
        year=ExtractIsoYear('date'),
        month=ExtractMonth('date'),
        week=ExtractWeek('date'),
    )
    # gameplays vs previous week
    curr_year, curr_week, _ = datetime.date.today().isocalendar()
    prev_week_year, prev_week, _ = (datetime.date.today() -
                                    datetime.timedelta(days=7)).isocalendar()
    week_diff = [
        games_list.filter(year=curr_year, week=curr_week).count(),
        -games_list.filter(year=prev_week_year, week=prev_week).count(),
    ]
    # gameplays vs previous month
    curr_month = datetime.date.today().month
    prev_month = curr_month - 1 if curr_month > 1 else 12
    prev_month_year = curr_year if curr_month > 1 else curr_year - 1
    month_diff = [
        games_list.filter(year=curr_year, month=curr_month).count(),
        -games_list.filter(year=prev_month_year, month=prev_month).count(),
    ]
    # gameplays vs previous year
    # BUG FIX: the original reused the previous-*month* year here, which
    # equals the current year except in January, so this almost always
    # compared the current year against itself.
    year_diff = [
        games_list.filter(year=curr_year).count(),
        -games_list.filter(year=curr_year - 1).count(),
    ]
    context.update({
        'week_diff': week_diff,
        'month_diff': month_diff,
        'year_diff': year_diff,
    })
    context.update(new_game_in_library(request, userid))
    context.update(new_exp_in_library(request, userid))
    return render(request, 'polls/userpage.html', context)
def get_context_data(self, **kwargs):
    """Add earnings aggregations (month, week, category, year) to context."""
    context = super().get_context_data(**kwargs)
    profile = Profile.objects.filter(user=self.request.user)
    query = Earns.objects.filter(author__in=profile)
    context["earnings_per_month"] = (
        query.filter(date__range=[start_date, end_date])
        .annotate(month=ExtractMonth('date'))
        .values('month')
        .annotate(sum=Sum('quantity'))
        .order_by())
    context["earnings_per_week"] = (
        query.filter(date__range=[month_start, month_end])
        .annotate(week=ExtractWeek('date'))
        .values('week')
        .annotate(sum=Sum('quantity'))
        .order_by('week'))
    context["earnings_per_category"] = (
        query.values('category__name')
        .annotate(sum=Sum('quantity'))
        .order_by('category'))
    context["earnings_last_five_years"] = (
        query.filter(date__range=[initial_year, final_year])
        .annotate(year=ExtractYear('date'))
        .values('year')
        .annotate(sum=Sum('quantity'))
        .order_by('year'))
    return context
def test_extract_week_func_boundaries(self):
    """ExtractWeek follows ISO-8601 numbering across year boundaries."""
    end_datetime = microsecond_support(
        datetime(2016, 6, 15, 14, 10, 50, 123))
    if settings.USE_TZ:
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    # Boundary samples: last ISO week of 2014, first ISO week of 2015
    # (falling inside calendar year 2014), and the rare ISO week 53.
    days = [
        microsecond_support(datetime(2014, 12, 27, 13, 0)),  # Sunday
        microsecond_support(datetime(2014, 12, 31, 13, 0)),  # Wednesday
        microsecond_support(datetime(2015, 12, 31, 13, 0)),  # Thursday
    ]
    if settings.USE_TZ:
        days = [timezone.make_aware(day, is_dst=False) for day in days]
    week_52_day_2014, week_1_day_2014_2015, week_53_day_2015 = days
    self.create_model(week_53_day_2015, end_datetime)
    self.create_model(week_52_day_2014, end_datetime)
    self.create_model(week_1_day_2014_2015, end_datetime)
    qs = DTModel.objects.filter(start_datetime__in=days).annotate(
        extracted=ExtractWeek('start_datetime'),
    ).order_by('start_datetime')
    self.assertQuerysetEqual(qs, [
        (week_52_day_2014, 52),
        (week_1_day_2014_2015, 1),
        (week_53_day_2015, 53),
    ], lambda m: (m.start_datetime, m.extracted))
class StatisticView(LoggingMixin, mixins.ListModelMixin, viewsets.ViewSetMixin,
                    generics.GenericAPIView):
    """Read-only listing of the distinct (week, year) pairs in which
    events were created, ordered chronologically."""
    # `values(week=...)` both annotates and projects the ISO week; the
    # second `values()` narrows the projection after adding the year.
    queryset = Event.objects.values(week=ExtractWeek('createdAt')).annotate(
        year=ExtractYear('createdAt')).values('week', 'year').distinct().order_by(
            'year', 'week')
    serializer_class = StatisticSerializer
def weekly_income(request):
    """Render weekly income totals for facility and vehicle reservations.

    Each aggregated row is rewritten in place with a human-readable week
    start ('week'), week end ('end_week'), and a stringified total.
    Cleanup: the duplicated formatting loops were extracted into a helper,
    and the unused `timestamp` computation and debug prints were removed.
    """
    def _weekly(qs):
        # Group a reservation queryset by ISO (year, week) with summed income.
        return (qs.annotate(year=ExtractYear('start_time'))
                .annotate(week=ExtractWeek('start_time'))
                .values('year', 'week')
                .annotate(income=Sum('total_amount')))

    def _format_rows(rows):
        # Mutates each cached row dict; iterating evaluates the queryset.
        for income in rows:
            if income['week']:
                d = str(income['year']) + "-W" + str(income['week'])
                start = dt.datetime.strptime(d + '-1', '%G-W%V-%u')
                income['week'] = dt.datetime.strftime(start, '%b %d, %Y')
                income['end_week'] = dt.datetime.strftime(
                    start + dt.timedelta(days=6), '%b %d, %Y')
                income['income'] = str(income['income'])

    facility_income = _weekly(FacilityReservation.objects)
    vehicle_income = _weekly(VehicleReservation.objects)
    # Evaluate (and cache) both querysets before chaining so the template
    # sees the formatted rows.
    _format_rows(facility_income)
    _format_rows(vehicle_income)
    income_list = chain(facility_income, vehicle_income)
    context = {
        'income_list': income_list,
    }
    return render(request, 'facility/income.html', context)
def get_weeklydata(self, request):
    """Return distance and time totals grouped by (week, year)."""
    records = self.get_queryset()
    weekly = (
        records
        .annotate(year=ExtractYear('date'), week=ExtractWeek('date'))
        .values('week', 'year')
        .annotate(totalDistance=Sum('distance'), totalTime=Sum('time'))
    )
    return Response(weekly)
def WeeklyReport(request):
    """Return distance and duration totals per (year, week) of entries."""
    aggregated = (
        Entry.objects
        .annotate(year=ExtractYear('date'), week=ExtractWeek('date'))
        .values('year', 'week')
        .annotate(totalDistance=Sum('distance'),
                  totalDuration=Sum('duration'))
    )
    return Response(aggregated)
def test_extract_week_func(self):
    """ExtractWeek works on datetime fields, date fields, and in lookups."""
    start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
    end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
    if settings.USE_TZ:
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    self.create_model(start_datetime, end_datetime)
    self.create_model(end_datetime, start_datetime)
    expected = [(start_datetime, 25), (end_datetime, 24)]
    transform = lambda m: (m.start_datetime, m.extracted)
    # Same expectation for both the datetime and the date source fields.
    for source_field in ('start_datetime', 'start_date'):
        self.assertQuerysetEqual(
            DTModel.objects.annotate(
                extracted=ExtractWeek(source_field)).order_by('start_datetime'),
            expected,
            transform,
        )
    # both dates are from the same week.
    self.assertEqual(
        DTModel.objects.filter(
            start_datetime__week=ExtractWeek('start_datetime')).count(), 2)
def get_query(self, **kwargs):
    """Return the period-grouped queryset between self.start and self.end.

    'weekly' groups by ISO (year, week); 'monthly'/'yearly' by truncated
    period.
    """
    qs = self.model.objects.filter(
        day__gte=self.start, day__lte=self.end, **kwargs)
    if self.period == 'weekly':
        weekly = qs.annotate(week=ExtractWeek('day'),
                             year=ExtractISOYear('day'))
        return weekly.order_by('year', 'week').values('year', 'week')
    if self.period == 'monthly':
        qs = qs.annotate(period=TruncMonth('day'))
    elif self.period == 'yearly':
        qs = qs.annotate(period=TruncYear('day'))
    return qs.order_by('period').values('period')
def admin_index(request):
    """Render per-staff weekly delivery counts for the manager dashboard.

    Cleanup: the previous version also fetched all staff, all orders, a
    monthly order aggregation, and a first delivery queryset — none of
    which were used (and, being lazy querysets, never hit the database).
    """
    delivery = deliveries_tbl.objects.filter(
        staff_to_deliver__user__is_superuser=False)
    queried = delivery.annotate(week=ExtractWeek('delivery_date')).values(
        'week', 'staff_to_deliver__user__username').annotate(
            total=Count('mstOrder_id'))
    arg = {
        'queried': queried,
    }
    template_name = 'manager/index2.html'
    return render(request, template_name, arg)
def statistic(request):
    """Render a per-week statistics table of the current user's records."""
    last_year = datetime.datetime.now().year - 1
    login = request.user.id
    data_set = (
        Add_data.objects
        .filter(author=login, date_time__year__gte=last_year)
        .annotate(week=ExtractWeek('date_time'))
        .values('week')
        .annotate(all_records=Count('speed'),
                  sum_distance=Sum('distance'),
                  total_duration=Sum('duration'),
                  average_speed=Avg(F('speed')))
        .order_by('week')
    )
    table = StatTable(data_set)
    RequestConfig(request).configure(table)
    return render(request, 'myapp/stat.html', {"table": table})
def ticket_ranking_progress(request):
    """
    A view that displays all data visualizations regarding issue tracker.
    Currently line charts of tickets tended is set to display data depending
    on today's date and user has no control over that (as in they cannot
    choose what date the line charts are based on).
    """
    today = datetime.datetime.today()
    bug_ranking = Ticket.objects.filter(type="T1").annotate(
        upvote_count=Count('upvote_user')).order_by('-upvote_count')[:5]
    feature_request_ranking = Ticket.objects.filter(
        type="T2").order_by('-upvote_fund')[:5]
    progress_log = ProgressLog.objects.all()
    week_ago = today - datetime.timedelta(days=6)
    pl_daily_filter = ProgressLog.objects.filter(
        date__range=(week_ago, today)).order_by('date')
    pl_weekly_filter = (
        ProgressLog.objects
        .filter(date__year=today.year)
        .filter(date__month=today.month)
        .annotate(year=ExtractYear('date'), week=ExtractWeek('date'))
        .values('year', 'week')
        .annotate(week_bug_total=Sum('bug_tended'),
                  week_feature_total=Sum('feature_tended'))
        .order_by('week'))
    pl_monthly_filter = (
        ProgressLog.objects
        .filter(date__year=today.year)
        .annotate(year=ExtractYear('date'), month=ExtractMonth('date'))
        .values('year', 'month')
        .annotate(month_bug_total=Sum('bug_tended'),
                  month_feature_total=Sum('feature_tended'))
        .order_by('month'))
    context = {
        'bug_ranking': bug_ranking,
        'feature_request_ranking': feature_request_ranking,
        'progress_log': progress_log,
        'pl_daily_filter': pl_daily_filter,
        'pl_weekly_filter': pl_weekly_filter,
        'pl_monthly_filter': pl_monthly_filter,
    }
    return render(request, "ticketrankingprogress.html", context)
def published_at_complete(self):
    """Build (and cache) JSON plot data of papers published per week.

    Returns a JSON string with the category's label/color, an `x` axis of
    {year, week} buckets covering 2020-01-01 through today, and a `total`
    series of paper counts per bucket (0 where no papers were published).
    """
    if not self._published_at_plot_data:
        published_at_plot_data = dict()
        published_at_plot_data['label'] = self.name
        published_at_plot_data['color'] = self.color
        published_at_plot_data['x'] = list()
        published_at_plot_data['total'] = list()
        start_date = datetime(2020, 1, 1)
        delta = datetime.now().date() - start_date.date()
        # Pre-fill one zeroed bucket per week between start_date and now,
        # keyed by the isocalendar (year, week) of each 7-day step.
        for i in range(int((delta.days + 1) / 7 + 1)):
            year = (start_date + timedelta(days=i * 7)).isocalendar()[0]
            week = (start_date + timedelta(days=i * 7)).isocalendar()[1]
            published_at_plot_data['x'].append({
                "year": year,
                "week": week
            })
            published_at_plot_data['total'].append(0)
        # Overwrite the zeros with actual per-week counts from the DB.
        for published_count in self._category.papers.filter(published_at__gt=start_date) \
                .annotate(year=ExtractYear('published_at'))\
                .annotate(week=ExtractWeek('published_at')).values('year', 'week').annotate(papers_added=Count('doi')).order_by('year', 'week'):
            try:
                # Locate the pre-built bucket for this (year, week).
                index = published_at_plot_data['x'].index({
                    "year": published_count['year'],
                    "week": published_count['week']
                })
                published_at_plot_data['total'][index] = published_count[
                    'papers_added']
            except ValueError:
                # NOTE(review): DB rows use ExtractYear/ExtractWeek while the
                # buckets use isocalendar(); dates near New Year can disagree
                # and land here — confirm whether that loss is acceptable.
                print("Not found ", published_count['year'],
                      published_count['week'])
        self._published_at_plot_data = json.dumps(published_at_plot_data,
                                                  cls=DjangoJSONEncoder)
    return self._published_at_plot_data
def introduction_graph():
    """Plot and save a bar chart of seminars per week in semester two of
    session 2020/2021, with the weekly average marked."""
    start_semester_2 = datetime.datetime(2021, 1, 11)
    end_semester_2 = datetime.datetime(2021, 3, 29)
    seminars = Seminar.objects.filter(start_time__gte=start_semester_2,
                                      end_time__lt=end_semester_2)
    seminars_by_week = seminars.annotate(
        week=ExtractWeek('start_time')).values('week').annotate(
            number_seminars=Count('week'))
    seminars_by_week = sorted(seminars_by_week, key=lambda row: row['week'])
    weeks = [row['week'] - 1 for row in seminars_by_week]
    number_seminars = [row['number_seminars'] for row in seminars_by_week]
    average_seminars = np.mean(number_seminars)
    standard_deviation = np.std(number_seminars)
    total_seminars = np.sum(number_seminars)
    print()
    print('Total number of seminars: ' + str(total_seminars))
    print('Average per week: ' + str(average_seminars))
    print('Standard deviation: ' + str(standard_deviation))
    print()
    intro_graph = plt.figure()
    plt.bar(weeks, number_seminars)
    plt.title('Seminars in semester two of session 2020/2021')
    plt.xlabel('Week')
    plt.ylabel('No. of Seminars')
    plt.xticks(weeks)
    plt.yticks(range(0, 24, 4))
    plt.axhline(average_seminars,
                linewidth=1,
                color='red',
                ls='--',
                label='Average no. of seminars')
    plt.legend()
    plt.show()
    intro_graph.savefig('semester_2_seminars.pdf', bbox_inches='tight')
def report(self, request, username=None):
    "Return a report on average speed & distance per week"
    weekly = (
        self.get_object().activities
        .values("date", "distance", "duration")
        .annotate(year=ExtractYear("date"))
        .annotate(week=ExtractWeek("date"))
        .values("year", "week")
        .annotate(sum_distance=Sum("distance"),
                  sum_duration=Sum("duration"))
    )
    page = self.paginate_queryset(weekly)
    if page is not None:
        # Paginated path: serialize only the current page.
        return self.get_paginated_response(
            ActivityReportSerializer(page, many=True).data)
    return Response(ActivityReportSerializer(weekly, many=True).data)
def applicant_analysis(request):
    """Return a JSON funnel of application steps grouped by week.

    Query params `from` and `to` (YYYY-MM-DD) bound the creation-date
    range; a missing/invalid date yields 400. Response keys are
    "week-start - week-end" date ranges, values are step funnels.
    """
    try:
        from_dt = datetime.datetime.strptime(request.GET.get('from', ''),
                                             "%Y-%m-%d").date()
        to_dt = datetime.datetime.strptime(request.GET.get('to', ''),
                                           "%Y-%m-%d").date()
    except ValueError:
        return HttpResponseBadRequest()
    if not from_dt or not to_dt:
        return HttpResponseBadRequest()
    applications = Application.objects \
        .filter(created_dt__gte=from_dt, created_dt__lte=to_dt) \
        .annotate(week=ExtractWeek('created_dt'),
                  year=ExtractYear('created_dt'),
                  month=ExtractMonth('created_dt')) \
        .values_list('week', 'year', 'step', 'month')
    # group data by year+week+step
    data = {}
    for week, year, raw_step, month in applications:
        step = Application.step_str(raw_step)
        # special stuff for last week of year / first week of year:
        # a week straddling the boundary is attributed to the other year.
        if week == 52 and month == 1:
            year -= 1
        elif week == 1 and month == 12:
            year += 1
        key = (week, year)  # key is a (week, year) tuple
        curr = data.get(key, {})
        curr[step] = curr.get(step, 0) + 1
        data[key] = curr
    # convert year-week to date ranges and data into funnel
    new_data = {}
    # BUG FIX: dict.iteritems() does not exist on Python 3 (AttributeError);
    # items() behaves correctly on both Python 2 and 3.
    for ((week, year), value) in data.items():
        d = "%s-W%s" % (year, week)
        from_date = datetime.datetime.strptime("%s-1" % d, "%Y-W%W-%w")
        to_date = datetime.datetime.strptime("%s-0" % d, "%Y-W%W-%w")
        new_key = "%s-%s" % (from_date.strftime("%Y-%m-%d"),
                             to_date.strftime("%Y-%m-%d"))
        new_data[new_key] = _convert_data_to_funnel(value)
    return HttpResponse(simplejson.dumps(new_data),
                        content_type="application/json")
def papers_per_week(self):
    """Return JSON {weeks, counts, totals} of this topic's documents per
    (year, week), where `totals` is the running cumulative count.

    Returns empty lists when the topic has no documents — the original
    crashed with IndexError on `data[0]` in that case.
    """
    documents = self.topic.documents \
        .annotate(year=ExtractYear('date')) \
        .annotate(week=ExtractWeek('date')) \
        .values('year', 'week').order_by('year', 'week') \
        .annotate(count=Count('*'))
    data = []
    total = 0
    for record in documents:
        week = "{year}-W{week}".format(year=record['year'],
                                       week=record['week'])
        count = record['count']
        total += count
        data.append((week, count, total))
    if not data:
        # BUG FIX: zip(*[]) produces nothing, so indexing data[0] below
        # raised IndexError for an empty queryset.
        return json.dumps(dict(weeks=[], counts=[], totals=[]))
    weeks, counts, totals = zip(*data)
    return json.dumps(dict(weeks=weeks, counts=counts, totals=totals))
def test_window_functions_can_find_price_drops(self):
    """A Window(Min) partitioned by product and ordered by week evaluates
    without a database error."""
    product = ProductFactory()
    # factory_boy doesn't support bulk_create :(
    for day_offset in range(0, 100, 5):
        TrackPointFactory(
            product=product,
            timestamp=timezone.now() - dt.timedelta(days=day_offset),
            price=random.randint(100, 999),
        )
    qs = TrackPoint.objects.annotate(
        min_price=Window(
            expression=Min('price'),
            partition_by=[F('product')],
            order_by=ExtractWeek('timestamp').asc(),
        ),
        price_drop=F('price') - F('min_price'),
    ).order_by('-price_drop')
    # Force full evaluation of the window query.
    list(qs)
def get(self, request):
    """Compare a product's current-week average sale against its weekly
    average over the trailing year.

    Responds with {"difference": weekly_avg - current_week_avg} rounded to
    two decimals, or an error payload when no sale was updated today.
    """
    product_id = request.query_params['product_id']
    today = datetime.now()
    weekday = today.weekday()  # NOTE(review): computed but never used
    # ISO week number(s) of sales updated today; acts as "current week".
    current_week = Sale.objects.filter(
        updated_at__date=(datetime.now().date())).annotate(
            week=ExtractWeek('updated_at')).values_list('week', flat=True)
    if len(current_week) != 0:
        sale_ids = Sale.objects.prefetch_related('product').filter(
            product__id=product_id).values_list('id', flat=True)
        avg_current_week_sale = Sale.objects.filter(
            id__in=sale_ids, week_id=current_week[0]).aggregate(
                Avg('total_sale'))['total_sale__avg']
        # last week of year
        if current_week[0] == 52:
            weekly_sale_avg = Sale.objects.filter(
                Q(id__in=sale_ids),
                Q(week_id__range=(1, current_week[0]), year_id=today.year)) \
                .values('week_id') \
                .annotate(weekly_sale=Avg('total_sale')) \
                .aggregate(Avg('weekly_sale'))['weekly_sale__avg']
        else:
            # Wrap around the year boundary: weeks after the current one
            # come from the previous year, earlier ones from this year.
            weekly_sale_avg = Sale.objects.filter(
                Q(id__in=sale_ids),
                Q(week_id__range=(1, current_week[0]), year_id=today.year) |
                Q(week_id__range=(current_week[0]+1, 52), year_id=today.year-1)) \
                .values('week_id') \
                .annotate(weekly_sale=Avg('total_sale')) \
                .aggregate(Avg('weekly_sale'))['weekly_sale__avg']
        # NOTE(review): if either aggregate returns None (no matching
        # rows), this subtraction raises TypeError — confirm upstream
        # guarantees at least one matching sale per side.
        difference = weekly_sale_avg - avg_current_week_sale
        return Response({"difference": round(difference, 2)},
                        status=status.HTTP_200_OK)
    else:
        return Response({"error": {
            "message": "No data for current week"
        }}, status=status.HTTP_200_OK)
def get_total_per_week(queryset, date_field, total_expression):
    """Aggregate `total_expression` per ISO week, keyed by (year, week).

    Getting the correct week *and* year near New Year is tricky: for
    Monday Dec 30th 2019, ExtractWeek correctly yields week 1 while
    ExtractYear yields 2019 instead of 2020. Anchoring both extractions
    on the last day of the week sidesteps the mismatch.
    """
    per_week = (
        queryset
        # TruncWeek truncates to midnight on the Monday of the week.
        .annotate(monday_of_week=TruncWeek(date_field))
        # relativedelta is not supported by the ORM (ProgrammingError:
        # "can't adapt type 'relativedelta'"), hence the plain timedelta.
        .annotate(sunday_of_week=ExpressionWrapper(
            F("monday_of_week") + timedelta(days=6, hours=20),
            DateTimeField()))
        .annotate(year=ExtractYear("sunday_of_week"))
        .annotate(week=ExtractWeek("sunday_of_week"))
        .values("year", "week")
        .annotate(total=total_expression)
        .order_by("year", "week"))
    return list(per_week)
def update_or_create_weekly_activity_summaries():
    """Refresh weekly activity summaries, overall and per activity type."""
    SUMMARY_TYPE = 'W'
    SET_PERIODIC_SUMMARY_FIELDS = set_weekly_summary_fields
    dt_early_monday = get_first_day_of_next_week_on_dt(EARLIEST_TO_UPDATE)
    q_base = Activity.objects.filter(
        start_date__gte=dt_early_monday).annotate(
            year=ExtractIsoYear('start_date'),
            week=ExtractWeek('start_date'))
    q_stats = create_stats_qs(q_base.values('athlete'))
    q_pt_stats = create_stats_qs(q_base.values('type', 'athlete'))
    # Overall totals, one summary per ISO (year, week).
    for frame in q_base.values('year', 'week').distinct():
        update_or_create_activity_summaries_total(
            SUMMARY_TYPE, SET_PERIODIC_SUMMARY_FIELDS,
            q_stats.filter(year=frame['year'], week=frame['week']), frame)
    # Per-activity-type totals, one summary per (type, year, week).
    for frame in q_base.values('type', 'year', 'week').distinct():
        update_or_create_activity_summaries_per_type(
            SUMMARY_TYPE, SET_PERIODIC_SUMMARY_FIELDS,
            q_pt_stats.filter(type=frame['type'],
                              year=frame['year'],
                              week=frame['week']), frame)
def get(self, request, *, value=-1, **kwargs):
    """Render the roadmap statistics page after an access check.

    (Docstring translated from Russian.) 404s unless the roadmap with
    id=`value` belongs to the requesting user. Builds per-week
    created/solved task counts (table1) and per-month point sums
    (table2); optionally plots graphs when kwargs['graphs'] is set.
    """
    roadmap = get_object_or_404(Roadmap, id=value)
    if roadmap.user != request.user:
        # Message (Russian): "You do not have sufficient rights to view
        # statistics for this task list."
        raise Http404(
            'У Вас недостаточно прав на просмотр статистики по данному списку задач.'
        )
    if hasattr(roadmap, 'tasks'):
        # Tasks annotated with the year/week they were created in.
        tasks = roadmap.tasks.only('create_date').annotate(
            year=ExtractYear('create_date'),
            week=ExtractWeek('create_date'))
        # Scores cast `date` to a DateField before extracting parts.
        scores = Scores.objects.filter(task__roadmap=roadmap).only(
            'date',
            'points',
        ).annotate(year=ExtractYear(
            ExpressionWrapper(F('date'), output_field=DateField())),
                   month=ExtractMonth(
                       ExpressionWrapper(F('date'),
                                         output_field=DateField())),
                   week=ExtractWeek(
                       ExpressionWrapper(F('date'),
                                         output_field=DateField())))
        created_tasks = tasks.values(
            'year', 'week').annotate(created=Count('week'))
        solved_tasks = scores.values(
            'year', 'week').annotate(solved=Count('week'))
        points = scores.values('year', 'month').annotate(
            points=Sum('points')).order_by('year', 'month')
        if kwargs.get('graphs'):
            self.graph_plot(request.user, tasks, scores,
                            kwargs.get('select_year'))
        # table1[year][week] -> {'dates', 'created', 'solved'}, merged
        # from the created and solved per-week aggregations.
        table1 = {}
        for tasks in (created_tasks, solved_tasks):
            for task in tasks:
                if not table1.get(task.get('year')):
                    table1[task.get('year')] = {}
                if not table1[task.get('year')].get(task.get('week')):
                    # Week bounds rendered as "monday / sunday".
                    start_date = Week(task.get('year'),
                                      task.get('week')).monday()
                    end_date = start_date + timedelta(days=6)
                    dates = str(start_date) + ' / ' + str(end_date)
                    table1[task.get('year')][task.get('week')] = {
                        'dates': dates,
                        'created': 0,
                        'solved': 0
                    }
                if task.get('created'):
                    table1[task.get('year')][task.get(
                        'week')]['created'] = task.get('created')
                elif task.get('solved'):
                    table1[task.get('year')][task.get(
                        'week')]['solved'] = task.get('solved')
        # table2[year] -> list of {'month', 'points'} rows.
        table2 = {}
        for row in points:
            if not table2.get(row.get('year')):
                table2[row.get('year')] = []
            data = {'month': row.get('month'), 'points': row.get('points')}
            table2[row.get('year')].append(data)
    else:
        table1 = {}
        table2 = {}
    return render(
        request,
        'roadmap_statistics.html', {
            'data': table1,
            'points': table2,
            'user': request.user,
            'return_path': reverse('src:roadmap_list'),
            'tables': kwargs.get('tables', False),
            'graphs': kwargs.get('graphs', False),
            'select_year': int(kwargs.get('select_year', -1)),
        })
def get_total_per_week(queryset, date_field, total_expression):
    """Aggregate `total_expression` per (year, week) of `date_field`,
    returned as an ordered list of dicts."""
    aggregated = (
        queryset
        .annotate(year=ExtractYear(date_field))
        .annotate(week=ExtractWeek(date_field))
        .values("year", "week")
        .annotate(total=total_expression)
        .order_by("year", "week"))
    return list(aggregated)
def retrieve_transaction_hist(request, symbol, transaction_date, type, account_id=-1):
    """Return 'b', 's', 'b&s' or '' for the user's trades in a time bucket.

    `type` selects the bucket containing/ending at `transaction_date`:
    'M' month, 'W' week, 'D' day, or '60'/'30'/'15' minute bars. System-
    created records are excluded; any exception is logged and '' returned.
    """
    # original format is 000001.SZ, only 000001 needed
    bs_entry = ''
    has_buy = False
    has_sell = False
    try:
        delta_hour = timedelta(hours=1)
        input_year = transaction_date.strftime('%Y')
        input_month = transaction_date.strftime('%m')
        # NOTE(review): %W is a zero-padded, Monday-based week *string*,
        # compared below against ExtractWeek's integer — confirm the DB
        # coercion and that the two numbering schemes agree here.
        input_week = transaction_date.strftime('%W')
        input_day = transaction_date.strftime('%d')
        input_min = transaction_date.strftime('%M')
        if type == 'M':  # month
            transaction_list = Transactions.objects.filter(
                trader=request.user.id,
                trade_account=account_id,
                stock_code=symbol,
                trade_time__year=input_year,
                trade_time__month=input_month).exclude(
                    created_or_mod_by='system').order_by('direction').distinct(
                        'direction')
        elif type == 'W':  # week
            # NOTE(review): unlike every other branch this does not filter
            # on trade_account — confirm whether that is intentional.
            transaction_list = Transactions.objects.annotate(
                week=ExtractWeek('trade_time')).filter(
                    trader=request.user.id,
                    stock_code=symbol,
                    trade_time__year=input_year,
                    week=input_week).exclude(
                        created_or_mod_by='system').order_by(
                            'direction').distinct('direction')
        elif type == 'D':  # day
            transaction_list = Transactions.objects.filter(
                trader=request.user.id,
                trade_account=account_id,
                stock_code=symbol,
                trade_time__startswith=transaction_date.strftime(
                    '%Y-%m-%d')).exclude(created_or_mod_by='system').order_by(
                        'direction').distinct('direction')
        elif type == '60':  # 60 min
            # A bar stamped at hour H covers the hour starting at H-1.
            delta_hour = timedelta(hours=1)
            transaction_list = Transactions.objects.filter(
                trader=request.user.id,
                trade_account=account_id,
                stock_code=symbol,
                trade_time__year=input_year,
                trade_time__month=input_month,
                trade_time__day=input_day,
                trade_time__hour=(transaction_date -
                                  delta_hour).strftime('%H')).exclude(
                                      created_or_mod_by='system').order_by(
                                          'direction').distinct('direction')
        elif type == '30':  # 30 min
            # Map the bar-end minute to the half-hour window it closes.
            start_min = '0'
            end_min = '29'
            delta_hour = timedelta(hours=0)
            if input_min == '00':
                # Bar ends on the hour: window is :30-:59 of the prior hour.
                start_min = '30'
                end_min = '59'
                delta_hour = timedelta(hours=1)
            elif input_min == '30':
                start_min = '0'
                end_min = '29'
            transaction_list = Transactions.objects.filter(
                trader=request.user.id,
                trade_account=account_id,
                stock_code=symbol,
                trade_time__year=input_year,
                trade_time__month=input_month,
                trade_time__day=input_day,
                trade_time__hour=(transaction_date -
                                  delta_hour).strftime('%H'),
                trade_time__minute__gte=start_min,
                trade_time__minute__lte=end_min,
            ).exclude(created_or_mod_by='system').order_by(
                'direction').distinct('direction')
        elif type == '15':  # 15 min
            # Map the bar-end minute to the quarter-hour window it closes.
            # NOTE(review): the fallback default here is '0'-'29' — a
            # 30-minute span, not 15 — confirm this default is intended.
            start_min = '0'
            end_min = '29'
            delta_hour = timedelta(hours=0)
            if input_min == '00':
                start_min = '45'
                end_min = '59'
                delta_hour = timedelta(hours=1)
            elif input_min == '15':
                start_min = '0'
                end_min = '14'
            elif input_min == '30':
                start_min = '15'
                end_min = '29'
            elif input_min == '45':
                start_min = '30'
                end_min = '44'
            transaction_list = Transactions.objects.filter(
                trader=request.user.id,
                trade_account=account_id,
                stock_code=symbol,
                trade_time__year=input_year,
                trade_time__month=input_month,
                trade_time__day=input_day,
                trade_time__hour=(transaction_date -
                                  delta_hour).strftime('%H'),
                trade_time__minute__gte=start_min,
                trade_time__minute__lte=end_min,
            ).exclude(created_or_mod_by='system').order_by(
                'direction').distinct('direction')
        # Collapse matched rows (at most one per direction) into flags.
        for rec in transaction_list:
            if rec.direction == 'b':
                has_buy = True
            else:
                has_sell = True
        if has_buy and has_sell:
            bs_entry = 'b&s'
        elif has_buy:
            bs_entry = 'b'
        elif has_sell:
            bs_entry = 's'
    except Exception as e:
        # Broad catch: any failure is logged and reported as "no entry".
        logger.error(e)
    return bs_entry
def get_context_data(self, **kwargs):
    """Assemble dashboard context: yearly totals, all-time totals,
    per-shoe mileage, and a weekly table with pace strings and
    week-over-week percentage changes."""
    context = super(DashboardView, self).get_context_data(**kwargs)
    # Only the trailing ~26 weeks feed the weekly table.
    week_start = datetime.datetime.today() - timedelta(days=180)
    year_data = (Run.objects.filter(date__gt='2011-12-31').annotate(
        year=ExtractYear('date')).values('year').annotate(
            dist=Sum('dist'),
            time=Sum('time'),
            cals=Sum('cals'),
            num=Count('date')))
    total_data = (Run.objects.aggregate(tot_dist=Sum('dist'),
                                        tot_time=Sum('time'),
                                        tot_cals=Sum('cals'),
                                        num=Count('date')))
    shoe_data = Run.objects.filter(shoes__active=True).values(
        'shoes__name', 'shoes_id').annotate(
            dist=Sum('dist'), num=(Count('date'))).order_by('-dist')
    week_data = (Run.objects.filter(date__gte=week_start).annotate(
        year=ExtractYear('date')).annotate(
            week=ExtractWeek('date')).values('year', 'week').annotate(
                total_dist=Sum('dist'),
                time=Sum('time'),
                cals=Sum('cals'),
                num=Count('date'),
                max_dist=(Max('dist'))).order_by('-year', '-week'))
    # Pace = minutes per unit distance, rendered as the first 4 chars
    # of a timedelta string ("M:SS").
    for year in year_data:
        pace = datetime.timedelta(
            minutes=year.get('time').total_seconds() / year.get('dist'))
        year['pace'] = str(pace)[:4]
    tot_pace = datetime.timedelta(
        minutes=total_data.get('tot_time').total_seconds() /
        total_data.get('tot_dist'))
    total_data['tot_pace'] = str(tot_pace)[:4]
    for week in week_data:
        pace = datetime.timedelta(
            minutes=week.get('time').total_seconds() /
            week.get('total_dist'))
        week['pace'] = str(pace)[:4]
        # format date for display and add to dict for context
        d = str(week.get('year')) + str("-W") + str(week.get('week'))
        w = datetime.datetime.strptime(d + '-1', '%G-W%V-%u')
        week['date'] = w.strftime("%b %d, %Y")
        # % change calcs
        # Baseline = larger of the two preceding weeks' totals/longest runs.
        week_i = 1
        long_run = 0
        weekly_total = 0
        while week_i <= 2:
            start_week = w - timedelta(weeks=week_i)
            print('start week: ', start_week)
            try:
                # NOTE(review): this lookup uses %Y/%W-formatted strings
                # against the ExtractYear/ExtractWeek annotations — confirm
                # they agree around year boundaries (ISO vs %W numbering).
                compare_week = week_data.get(
                    year=str(datetime.datetime.strftime(start_week, '%Y')),
                    week=str(datetime.datetime.strftime(start_week, '%W')))
                wk_total_dist = compare_week.get('total_dist')
                wk_long_run = compare_week.get('max_dist')
            except ObjectDoesNotExist:
                wk_total_dist = 0
                wk_long_run = 0
            #print ('data: ', start_week, compare_week, wk_total_dist, weekly_total)
            if wk_total_dist > weekly_total:
                weekly_total = wk_total_dist
            if wk_long_run > long_run:
                long_run = wk_long_run
            week_i += 1
        # 100% is used as the sentinel when there is no prior-week baseline.
        if weekly_total > 0:
            week['tot_change'] = ((
                (week.get('total_dist') - weekly_total) / weekly_total) * 100)
        else:
            week['tot_change'] = 100
        if long_run > 0:
            week['long_change'] = ((
                (week.get('max_dist') - long_run) / long_run) * 100)
        else:
            week['long_change'] = 100
    context.update({
        'years': year_data,
        'weeks': week_data,
        'shoes': shoe_data,
        'totals': total_data,
        #'schedules': Plan.objects.filter(end_date__gt=datetime.datetime.now())
    })
    return context
def contract_list(request):
    """Render the contract list with filtered monthly/weekly aggregations
    and, when `selected_year` is supplied, per-month active-contract totals.

    Cleanup: removed debug prints (one of which forced evaluation of an
    otherwise-unused queryset), commented-out code, and the unused
    unfiltered monthly aggregation.
    """
    contract_list = Contract.objects.all()
    contract_filter = ContractFilter(request.GET, queryset=contract_list)
    contract_filter_monthly = contract_filter.qs.annotate(
        month=TruncMonth('date_end')).values(
            'company__name', 'contract_type__contract_type',
            'currency_type__currency_type', 'month').annotate(
                summed_value=Sum('contract_value')).order_by()
    contract_filter_weekly = contract_filter.qs.annotate(
        month=TruncMonth('date_end'), week=ExtractWeek('date_end')).values(
            'company__name', 'contract_type__contract_type',
            'currency_type__currency_type', 'month').annotate(
                summed_value=Sum('contract_value')).order_by()
    month_total_value_dict = {}
    if request.method == "GET":
        qd = request.GET
        if qd.get('selected_year'):
            date_year = int(qd.get('selected_year'))
            # Sum the value of every contract active at any point within
            # each month of the selected year.
            for month_number in range(1, 13):
                month_name = calendar.month_name[month_number]
                month_first_date = datetime.date(date_year, month_number, 1)
                month_last_date = datetime.date(
                    date_year, month_number,
                    calendar.monthrange(date_year, month_number)[1])
                month_contractobj_qs = contract_filter.qs.filter(
                    date_start__lte=month_last_date,
                    date_end__gte=month_first_date)
                month_total_value_dict[month_name] = (
                    month_contractobj_qs.aggregate(
                        monthly_value=Sum('contract_value')))
    context = {
        "contract_list": contract_list,
        "filter_monthly": contract_filter_monthly,
        "month_total_value_dict": month_total_value_dict,
        "filter_weekly": contract_filter_weekly,
        "filter": contract_filter,
        "title": "Contract List"
    }
    return render(request, "contract_list.html", context)
def get(self, request):
    """Build the main events menu: a search entry, this-week/future/past
    event links with counts, a staff-only add link, and any one-shot
    notifications stored in the cache."""
    all_events = (Event.objects.annotate(
        year=ExtractYear('start_datetime')).annotate(
            start_week=ExtractWeek('start_datetime')))
    today = datetime.datetime.today()
    current_year = today.isocalendar()[0]
    current_week = today.isocalendar()[1]
    this_week_events = []
    future_events = []
    past_events = []
    # Bucket every event relative to the current year/week.
    for event in all_events:
        if event.year == current_year and event.start_week == current_week:
            this_week_events.append(event)
        elif (event.year == current_year and
              event.start_week > current_week) or event.year > current_year:
            future_events.append(event)
        else:
            past_events.append(event)
    menu_items = [
        MenuItem(description='Search',
                 method='GET',
                 path=reverse('search_wizard'))
    ]
    if not this_week_events:
        menu_items.append(MenuItem(description='No events this week'))
    else:
        menu_items.append(
            MenuItem(description='This week events ({})'.format(
                len(this_week_events)),
                     method='GET',
                     path=reverse('events', args=('current_week', ))))
    if not future_events:
        menu_items.append(MenuItem(description='No future events'))
    else:
        menu_items.append(
            MenuItem(description='Future evens ({})'.format(
                len(future_events)),
                     method='GET',
                     path=reverse('events', args=('future_events', ))))
    if not past_events:
        menu_items.append(MenuItem(description='No past events'))
    else:
        menu_items.append(
            MenuItem(description='Past evens ({})'.format(
                len(past_events)),
                     method='GET',
                     path=reverse('events', args=('past_events', ))))
    user = self.get_user()
    if user.is_staff:
        # Staff additionally see an "Add event" entry at the menu top.
        menu_items.insert(
            0,
            MenuItem(description='Add event',
                     method='GET',
                     path=reverse('add_event')))
    # check to see if we have notifications set in cache
    # Notifications are one-shot: shown once, then removed from the cache.
    if cache.get('event_added'):
        menu_items.insert(0,
                          MenuItem(description='Event added successfully'))
        cache.delete('event_added')
    if cache.get('event_edited'):
        menu_items.insert(
            0, MenuItem(description='Event edited successfully'))
        cache.delete('event_edited')
    if cache.get('event_deleted'):
        menu_items.insert(
            0, MenuItem(description='Event deleted successfully'))
        cache.delete('event_deleted')
    content = Menu(body=menu_items, header='menu')
    return self.to_response(content)