예제 #1
0
def workflow(request, pk):
    """Render the detail page for one workflow with run/artifact statistics.

    Only workflows belonging to the requesting user are visible (404
    otherwise).
    """
    # Per-OS billing multipliers (GitHub Actions style minute accounting).
    multipliers = {"UBUNTU": 1, "MACOS": 10, "WINDOWS": 2}
    workflow = get_object_or_404(Workflow, id=pk, repo__user=request.user)

    # Shared base querysets, reused by several context entries below.
    runs = Run.objects.filter(workflow=workflow)
    live_artifacts = Artifact.objects.filter(run__workflow=workflow,
                                             expired=False)
    runs_by_day = runs.annotate(day=TruncDate('start_time')).values('day')
    per_os_zero = {"UBUNTU": 0, "MACOS": 0, "WINDOWS": 0}

    context = {
        "workflow": workflow,
        "states": Run.objects.values('conclusion').distinct().annotate(
            Count('conclusion')),
        "artifact_count": live_artifacts.count(),
        "artifact_size": live_artifacts.aggregate(Sum("size_in_bytes")),
        "run_count": runs.count(),
        "elapsed_time_stats": runs.aggregate(Sum('elapsed'), Avg('elapsed')),
        "timings_seconds": dict(per_os_zero),
        "timings_rounded": dict(per_os_zero),
        "timings_multiplied": dict(per_os_zero),
        "counts": dict(per_os_zero),
        "daily_run_count": runs_by_day.annotate(run_count=Count('id')),
        "daily_artifact_size": runs_by_day.annotate(
            total_artifact_size=Sum('artifact__size_in_bytes')),
    }

    # Flatten the raw timing rows into plain per-OS totals so the template
    # does not need to aggregate anything itself.
    timing_rows = Timing.objects.filter(
        run__workflow=workflow).values_list("os", "length", "jobs")
    for os_name, length_ms, job_count in timing_rows:
        seconds = length_ms / 1000.0
        billed = seconds * multipliers[os_name]
        context["timings_seconds"][os_name] += seconds
        context["timings_multiplied"][os_name] += billed
        # Billed time is rounded up to whole minutes per timing row.
        context["timings_rounded"][os_name] += math.ceil(billed / 60.0)
        context["counts"][os_name] += job_count
    return render(request, "misc/workflow.html", context)
예제 #2
0
def get_total_item_chart(startDate, endDate, report, dataSource):
    """Count touchpoints per day, grouped by the given report's name field.

    :param startDate: inclusive start of the time range.
    :param endDate: inclusive end of the time range.
    :param report: related model prefix; grouping column is ``report + '__name'``.
    :param dataSource: a specific data source, or the string ``'all'`` to
        include every public data source.
    :return: list of ``[formatted_date, total, name]`` rows, oldest first;
        empty list when nothing matched.
    """
    column_name = report + '__name'

    # The two branches differed only in one filter keyword, so build the
    # filter dict once instead of duplicating the whole query pipeline.
    filters = {'time__range': [startDate, endDate]}
    if dataSource == 'all':
        filters['data_source__is_public'] = True
    else:
        filters['data_source'] = dataSource

    total_item_by_date = list(
        Touchpoint.objects.filter(**filters).annotate(
            date=TruncDate('time')).values(
                'date',
                column_name).annotate(total=Count('id')).order_by(
                    'date', column_name))

    # Empty queryset -> empty report (avoids formatting absent dates).
    if not total_item_by_date:
        return []
    return [[
        item['date'].strftime("%d-%m-%Y"), item['total'], item[column_name]
    ] for item in total_item_by_date]
    def update_daily_active_users_dates(self, course, last_tracker_pk,
                                        newest_tracker_pk, tracker_date_field,
                                        dau_total_date_field, dau_type,
                                        course_no, course_total):
        """Refresh DailyActiveUsers rows for each day touched by new trackers.

        Scans trackers in the pk window (last_tracker_pk, newest_tracker_pk]
        for *course*, finds the distinct activity days, and for each day
        stores the distinct-user total on the DailyActiveUsers record before
        updating every user's per-day entry.

        tracker_date_field -- name of the Tracker datetime field to bucket by.
        dau_total_date_field -- DailyActiveUsers field receiving the total.
        dau_type -- label used for progress output and per-user records.
        course_no / course_total -- progress-output counters only.
        """

        # Only days touched by the NEW trackers (restricted to the pk window).
        trackers = Tracker.objects.filter(pk__gt=last_tracker_pk,
                                          pk__lte=newest_tracker_pk,
                                          course=course) \
            .annotate(day=TruncDate(tracker_date_field)) \
            .values('day').distinct()

        trackers_count = trackers.count()

        # for each tracker update the DAU model
        for idx, tracker in enumerate(trackers):
            self.stdout.write(
                'Updating DAUs for %s - %s (%s: course %d/%d DAU %d/%d)' %
                (tracker['day'], course.get_title(), dau_type, course_no + 1,
                 course_total, idx + 1, trackers_count))

            # NOTE: the per-day totals are computed over ALL trackers for the
            # day (no pk window here), so existing records are fully rebuilt.
            users = Tracker.objects.annotate(
                day=TruncDate(tracker_date_field)) \
                .filter(day=tracker['day'], course=course).values_list('user',
                                                        flat=True).distinct()

            total_users = len(users)
            dau_obj, created = DailyActiveUsers.objects.update_or_create(
                day=tracker['day'],
                defaults={dau_total_date_field: total_users})

            # Refresh the per-user time-spent entries for this day.
            for user_id in users:
                self.update_daily_active_users_update(tracker,
                                                      tracker_date_field,
                                                      user_id, course, dau_obj,
                                                      dau_type)
예제 #4
0
파일: views.py 프로젝트: slevin886/MassMaps
def home(request):
    """Render the map home page with average and per-day commute statistics."""
    # TODO: only works for driving for now
    avg_commutes = Commute.objects.filter(mode='driving').values(
        'origin__name',
        'origin__latitude',
        'origin__longitude',
    ).annotate(average_time_dist=Avg('in_traffic') / Avg('distance'),
               average_time=Avg('in_traffic'),
               # Average delay: time in traffic minus free-flow duration.
               average_traffic=Avg('in_traffic') -
               Avg('duration')).order_by('average_time_dist').all()

    # NOTE(review): hour < 12 here and hour > 12 below means readings taken
    # during the 12:00 hour land in neither bucket — confirm that's intended.
    morning_commutes = Commute.objects.filter(
        mode='driving', date__hour__lt=12).values(
            time=Cast(TruncDate('date'), CharField())).annotate(
                avg_time=Avg('in_traffic')).values('time', 'avg_time')

    evening_commutes = Commute.objects.filter(
        mode='driving', date__hour__gt=12).values('date__date').order_by(
            'date__date').annotate(avg_time=Avg('in_traffic')).annotate(
                time=Cast(TruncDate('date'), CharField())).values(
                    'time', 'avg_time')

    return render(
        request, 'app_maps/home.html', {
            'avg_commutes': list(avg_commutes),
            'evening_commutes': list(evening_commutes),
            'morning_commutes': list(morning_commutes),
        })
예제 #5
0
    def test_trunc_date_func(self):
        """TruncDate truncates datetimes to dates and rejects TimeFields."""
        start_datetime = microsecond_support(
            datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(
            datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        # Two rows with the datetimes swapped between the two fields.
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)
        self.assertQuerysetEqual(
            DTModel.objects.annotate(extracted=TruncDate(
                'start_datetime')).order_by('start_datetime'), [
                    (start_datetime, start_datetime.date()),
                    (end_datetime, end_datetime.date()),
                ], lambda m: (m.start_datetime, m.extracted))
        # TruncDate in a filter matches both rows by their date part.
        self.assertEqual(
            DTModel.objects.filter(
                start_datetime__date=TruncDate('start_datetime')).count(), 2)

        # Truncating a TimeField down to a date must fail...
        with self.assertRaisesMessage(
                ValueError,
                "Cannot truncate TimeField 'start_time' to DateField"):
            list(DTModel.objects.annotate(truncated=TruncDate('start_time')))

        # ...even with an explicit output_field override.
        with self.assertRaisesMessage(
                ValueError,
                "Cannot truncate TimeField 'start_time' to DateField"):
            list(
                DTModel.objects.annotate(truncated=TruncDate(
                    'start_time', output_field=TimeField())))
예제 #6
0
파일: api.py 프로젝트: jcroot/ayudapy
def StatsDailyView(request):
    """Return per-day counts of active, unique-phone and resolved requests.

    Requires ``date_from`` and ``date_to`` query parameters; responds with
    HTTP 400 when either is missing.
    """
    try:
        date_from = request.GET["date_from"]
        date_to = request.GET["date_to"]
    except KeyError:
        # request.GET[...] raises a KeyError subclass for a missing key; the
        # original bare `except:` would also have swallowed unrelated errors.
        error_msg = "You should specify query parameters date_from and date_to"
        return JsonResponse({"msg": error_msg},
                            status=status.HTTP_400_BAD_REQUEST)

    # Active, unresolved requests per day.
    total_active = HelpRequest.objects.filter(active=True, resolved=False,
                                              added__gte=date_from,
                                              added__lte=date_to) \
                                      .annotate(date=TruncDate('added')) \
                                      .values('date') \
                                      .annotate(total=Count('date')) \
                                      .order_by('date') \
                                      .values('date', 'total')
    # One representative request per distinct phone number...
    distinct_requests = HelpRequest.objects.filter(active=True, resolved=False,
                                                   added__gte=date_from,
                                                   added__lte=date_to) \
                                           .distinct('phone').values('id')
    # ...then counted per day.
    total_active_unique_phone = HelpRequest.objects.filter(id__in=distinct_requests) \
                                                   .annotate(date=TruncDate('added')) \
                                                   .values('date') \
                                                   .annotate(total=Count('date')) \
                                                   .order_by('date') \
                                                   .values('date', 'total')
    # Resolved requests per day (regardless of the active flag).
    total_resolved = HelpRequest.objects.filter(resolved=True,
                                                added__gte=date_from,
                                                added__lte=date_to) \
                                        .annotate(date=TruncDate('added')) \
                                        .values('date') \
                                        .annotate(total=Count('date')) \
                                        .order_by('date') \
                                        .values('date', 'total')
    stats = dict(total_active=list(total_active),
                 total_active_unique_phone=list(total_active_unique_phone),
                 total_resolved=list(total_resolved))
    return JsonResponse(stats)
예제 #7
0
    def get_relay_history_stats(
            self,
            from_date: datetime.datetime = None,
            to_date: datetime.datetime = None) -> Dict[str, Any]:
        """Aggregate relay-service history statistics, grouped by day.

        :param from_date: start of the window; defaults to 2018-11-01 UTC.
        :param to_date: end of the window; defaults to the current time.
        :return: nested dict with safe-creation and relayed-tx statistics.
        """

        from_date = from_date if from_date else datetime.datetime(
            2018, 11, 1, tzinfo=utc)
        to_date = to_date if to_date else timezone.now()

        def add_time_filter(queryset):
            # Restrict a queryset to rows created within the window.
            return queryset.filter(created__range=(from_date, to_date))

        return {
            'safes_created': {
                # Number of deployed safes per creation day.
                'deployed':
                add_time_filter(SafeContract.objects.deployed()).annotate(
                    created_date=TruncDate('created')).values('created_date').
                annotate(number=Count('*')).order_by('created_date'),
                'average_deploy_time_seconds':
                SafeContract.objects.get_average_deploy_time_grouped(
                    from_date, to_date),
                'average_deploy_time_total_seconds':
                SafeContract.objects.get_average_deploy_time_total_grouped(
                    from_date, to_date),
                'payment_tokens':
                SafeContract.objects.get_creation_tokens_usage_grouped(
                    from_date, to_date),
                'funds_stored': {
                    'ether':
                    SafeContract.objects.get_total_balance_grouped(
                        from_date, to_date),
                    'tokens':
                    SafeContract.objects.get_total_token_balance_grouped(
                        from_date, to_date),
                }
            },
            'relayed_txs': {
                # Number of relayed multisig txs per creation day.
                'total':
                add_time_filter(
                    SafeMultisigTx.objects.annotate(created_date=TruncDate(
                        'created')).values('created_date').annotate(
                            number=Count('*')).order_by('created_date')),
                'average_execution_time_seconds':
                SafeMultisigTx.objects.get_average_execution_time_grouped(
                    from_date, to_date),
                'payment_tokens':
                add_time_filter(
                    SafeMultisigTx.objects.get_tokens_usage_grouped()),
                'volume': {
                    'ether':
                    SafeContract.objects.get_total_volume_grouped(
                        from_date, to_date),
                    'tokens':
                    SafeContract.objects.get_total_token_volume_grouped(
                        from_date, to_date),
                }
            }
        }
예제 #8
0
def statisticslogs(request):
    """Render the admin statistics page with per-day or per-month log counts.

    Query parameters:
        stat_type -- 'M' for monthly buckets, anything else for daily.
        optionRadios -- 'period' restricts to [from_date, to_date].
        from_date / to_date -- range bounds used when optionRadios='period'.
    """
    stat_type = request.GET.get('stat_type')
    stat_gbn = request.GET.get('optionRadios')
    to_date = request.GET.get('to_date')
    from_date = request.GET.get('from_date')

    # The original four branches shared one identical pipeline and differed
    # only in the truncation granularity and an optional range filter.
    trunc = TruncMonth if stat_type == 'M' else TruncDate
    logs = Log.objects.all()
    if stat_gbn == 'period':
        logs = logs.filter(log_date__range=[from_date, to_date])
    stats = logs \
        .annotate(stat_date=trunc('log_date')) \
        .order_by('-stat_date') \
        .values('stat_date') \
        .annotate(stat_count=Count('log_userid')
                  ).values('stat_date', 'stat_count')

    # Chart data as parallel string lists, oldest first.
    date_list = []
    date_count = []
    for stat in stats:
        date_list.append(str(stat['stat_date']))
        date_count.append(str(stat['stat_count']))

    date_list.reverse()
    date_count.reverse()

    context = {
        'stats': stats,
        'stat_type': stat_type,
        'optionRadios': stat_gbn,
        'to_date': to_date,
        'from_date': from_date,
        'date_list': json.dumps(date_list),
        'date_count': json.dumps(date_count)
    }
    return render(request, 'adminpage/statistics_logs.html', context)
 def right_length(asset):
     """Total number of distinct activity dates across trades, transactions
     and flows for *asset*."""
     def distinct_days(queryset):
         # Count how many distinct calendar dates appear in the queryset.
         return queryset.order_by().annotate(
             date2=TruncDate('date')).values('date2').annotate(
                 x=Count('*')).count()

     trades = Trade.objects.filter(Q(buy_asset=asset) | Q(sell_asset=asset))
     transactions = Transaction.objects.filter(asset=asset)
     flows = Flow.objects.filter(asset=asset)
     return (distinct_days(trades) + distinct_days(transactions) +
             distinct_days(flows))
예제 #10
0
    def get(self, request, *args, **kwargs):
        """Render the job dashboard with task, survey and user statistics."""
        job_id = request.GET.get('id', None)  # renamed: `id` shadows builtin
        job = None
        # Always defined: render() below references `tasks` even when no id
        # was supplied (the original raised NameError in that case).
        tasks = {}
        if job_id is not None:
            job = Job.objects.filter(id=job_id).first()
        # Guard against an unknown id: .first() returns None on no match,
        # which previously crashed on `job.has_keyword_search`.
        if job is not None:
            if job.has_keyword_search:
                tasks['ks'] = KeywordSearch.objects.filter(parent=job)
                # Keyword searches per day.
                ks_by_date = tasks['ks'].annotate(
                    day=TruncDate('created_at')).values('day').annotate(
                        c=Count('id')).values('day', 'c')
                data = {}
                for k in ks_by_date:
                    data[k['day']] = k['c']
                # Zero-filled series over the last 30 days through tomorrow.
                tasks['ks_by_date'] = [{
                    'day': d.strftime('%Y-%m-%d'),
                    'c': data[d] if d in data else 0
                } for d in daterange(start_date=now() - timedelta(+30),
                                     end_date=now() + timedelta(+1))]
                # Top five keywords by usage.
                tasks['ks_by_keywords'] = tasks['ks'].values(
                    'keyword').annotate(c=Count('id')).values(
                        'keyword', 'c').order_by('-c')[:5]
            if job.has_qbe_search:
                tasks['qbe'] = QBESearch.objects.filter(parent=job)
            if job.has_survey:
                tasks['survey'] = job.surveys.annotate(
                    credit=Sum('credits__amount')).all()
                answers = []
                for survey in tasks['survey']:
                    # Survey responses per day, zero-filled like ks_by_date.
                    answer = survey.survey.responses.annotate(
                        day=TruncDate('created')).values('day').annotate(
                            c=Count('id')).values('day', 'c')
                    data = {}
                    for k in answer:
                        data[k['day']] = k['c']
                    data2 = [{
                        'day': d.strftime('%Y-%m-%d'),
                        'c': data[d] if d in data else 0
                    } for d in daterange(start_date=now() - timedelta(+30),
                                         end_date=now() + timedelta(+1))]
                    answers.append({'id': survey.id, 'data': data2})
                tasks['answers_by_date'] = answers
                # Top five users by number of responses (site-wide).
                users = PyanoUser.objects.all().annotate(
                    c=Count('responses')).values('username',
                                                 'c').order_by('-c')[:5]

                tasks['users_surveys'] = users
        return render(request,
                      template_name=self.template_name,
                      context={
                          'job': job,
                          'tasks': tasks
                      })
예제 #11
0
    def list(self, request):
        """Return merged per-day push counts and failure counts.

        Responds 400 when the query parameters fail validation.
        """
        query_params = FailuresQueryParamsSerializer(data=request.query_params)
        if not query_params.is_valid():
            return Response(data=query_params.errors,
                            status=HTTP_400_BAD_REQUEST)

        params = query_params.validated_data
        startday = params['startday']
        endday = get_end_of_day(params['endday'])
        repo = list(get_repository(params['tree']))
        bug_id = params['bug']

        # Pushes per day; Count('author') counts one row per push.
        push_query = (Push.objects
                      .filter(repository_id__in=repo,
                              time__range=(startday, endday))
                      .annotate(date=TruncDate('time'))
                      .values('date')
                      .annotate(test_runs=Count('author'))
                      .order_by('date')
                      .values('date', 'test_runs'))

        # Failures per day: for one bug when given, otherwise all jobs with
        # the intermittent classification (id 4).
        if bug_id:
            job_query = (BugJobMap.failures
                         .default(repo, startday, endday)
                         .by_bug(bug_id)
                         .annotate(date=TruncDate('job__push__time'))
                         .values('date')
                         .annotate(failure_count=Count('id'))
                         .order_by('date')
                         .values('date', 'failure_count'))
        else:
            job_query = (Job.objects
                         .filter(push__time__range=(startday, endday),
                                 repository_id__in=repo,
                                 failure_classification_id=4)
                         .select_related('push')
                         .annotate(date=TruncDate('push__time'))
                         .values('date')
                         .annotate(failure_count=Count('id'))
                         .order_by('date')
                         .values('date', 'failure_count'))

        # Merge the two series by date: a date present in both gets the
        # test_runs count copied onto the failure row; a date only in
        # push_query gets a row with failure_count defaulted to 0.
        self.queryset = []
        for push in push_query:
            matches = [row for row in job_query if row['date'] == push['date']]
            if matches:
                matches[0]['test_runs'] = push['test_runs']
                self.queryset.append(matches[0])
            else:
                self.queryset.append({
                    'date': push['date'],
                    'test_runs': push['test_runs'],
                    'failure_count': 0
                })

        serializer = self.get_serializer(self.queryset, many=True)
        return Response(serializer.data)
예제 #12
0
def detail(request, customer_id=3000):
    """Render per-day consumption charts for a single customer.

    The customer id comes from the 'originaltxt' GET parameter; missing or
    non-numeric input falls back to customer 3000.
    """
    try:
        #get text input id
        orig = request.GET['originaltxt'].lower()
        #convert to integer
        customer_id = int(orig)
    except (KeyError, ValueError):
        # Missing parameter (KeyError) or non-numeric text (ValueError);
        # the original bare `except:` also hid genuine bugs.
        customer_id = 3000

    # Per-day unit totals for this customer. The original built this exact
    # queryset twice (tt and t_indiv); once is enough — Django caches the
    # rows after the first full iteration.
    daily = Reading.objects.filter(customer=customer_id).annotate(
        date=TruncDate('timestamp')).values('date').annotate(
            units=Sum('units')).values('date', 'units')

    data_per_time = [each['units'] for each in daily]
    stddev = np.std(data_per_time)

    data_indiv = {'x': [], 'y': []}
    data_var = {'x': [], 'y': [], 'z': []}

    for each in daily:
        data_indiv['x'].append(str(each['date']))
        data_indiv['y'].append(each['units'])
        data_var['x'].append(str(each['date']))
        # Band of +/- one standard deviation around each daily value.
        data_var['y'].append((each['units']) + stddev)
        data_var['z'].append((each['units']) - stddev)

    # Rolling average (window 10) of the band edges; the NaNs from the
    # warm-up window are replaced with the column mean.
    df = pd.DataFrame(data_var)
    df['y'] = df['y'].rolling(window=10).mean()
    df['y'].fillna(df['y'].mean(), inplace=True)
    df['z'] = df['z'].rolling(window=10).mean()
    df['z'].fillna(df['z'].mean(), inplace=True)
    data_var_pos = {'x': df['x'].tolist(), 'y': df['y'].tolist()}
    data_var_neg = {'x': df['x'].tolist(), 'y': df['z'].tolist()}

    latest_info_list = Customer.objects.get(id=customer_id)
    context = {
        'data_indiv': json.dumps(data_indiv),
        'latest_info_list': latest_info_list,
        'data_var': json.dumps(data_var_pos),
        'data_var_neg': json.dumps(data_var_neg)
    }
    return render(request, 'consumption/detail.html', context)
예제 #13
0
    def get_queryset(self):
        """Build merged per-day push counts and failure counts.

        Counts pushes per day (``test_runs``) and intermittently-classified
        failures per day (``failure_count``), then merges the two series by
        date into a list of dicts.
        """
        startday = self.request.query_params.get('startday')
        # NOTE(review): .encode('utf-8') turns the endday string into bytes
        # before passing it on — verify get_end_of_day really expects bytes.
        endday = get_end_of_day(
            self.request.query_params.get('endday').encode('utf-8'))
        repo = list(get_repository(self.request.query_params.get('tree')))
        bug_id = self.request.query_params.get('bug')

        # Pushes per day; Count('author') counts one row per push.
        push_query = Push.objects.filter(
            repository_id__in=repo, time__range=(startday, endday)).annotate(
                date=TruncDate('time')).values('date').annotate(
                    test_runs=Count('author')).order_by('date').values(
                        'date', 'test_runs')

        # Failures per day: restricted to one bug when given, otherwise all
        # jobs with failure classification 4 (intermittent).
        if bug_id:
            job_query = BugJobMap.objects.filter(job__repository_id__in=repo,
                                                 job__push__time__range=(startday, endday),
                                                 job__failure_classification__id=4,
                                                 bug_id=int(bug_id)
                                                 ).select_related('push').annotate(date=TruncDate('job__push__time'))\
                                                 .values('date').annotate(failure_count=Count('id')).order_by(
                                                 'date').values('date', 'failure_count')
        else:
            job_query = Job.objects.filter(
                push__time__range=(startday, endday),
                repository_id__in=repo,
                failure_classification_id=4).select_related('push').annotate(
                    date=TruncDate('push__time')).values('date').annotate(
                        failure_count=Count('id')).order_by('date').values(
                            'date', 'failure_count')

        # merges the push_query and job_query results into a list; if a date is found in both queries,
        # update the job_query with the test_run count, if a date is in push_query but not job_query,
        # add a new object with push_query data and a default for failure_count
        queryset = []
        for push in push_query:
            # Casting to list since Python 3's `filter` produces an iterator
            # rather than a list, which is not subscriptable.
            match = list(filter(lambda x: push['date'] == x['date'],
                                job_query))
            if match:
                match[0]['test_runs'] = push['test_runs']
                queryset.append(match[0])
            else:
                queryset.append({
                    'date': push['date'],
                    'test_runs': push['test_runs'],
                    'failure_count': 0
                })

        return queryset
예제 #14
0
    def list(self, request):
        """Return merged per-day push counts and failure counts.

        Responds 400 when the query parameters fail validation; otherwise
        serializes a list of ``{date, test_runs, failure_count}`` rows.
        """
        query_params = FailuresQueryParamsSerializer(data=request.query_params)
        if not query_params.is_valid():
            return Response(data=query_params.errors,
                            status=HTTP_400_BAD_REQUEST)

        startday = query_params.validated_data['startday']
        endday = get_end_of_day(query_params.validated_data['endday'])
        repo = query_params.validated_data['tree']
        bug_id = query_params.validated_data['bug']

        # Pushes per day; Count('author') counts one row per push.
        push_query = (Push.failures.filter(
            time__range=(startday, endday)).by_repo(repo, False).annotate(
                date=TruncDate('time')).values('date').annotate(
                    test_runs=Count('author')).values('date', 'test_runs'))

        # Failures per day: restricted to one bug when given, otherwise all
        # jobs with failure classification 4.
        if bug_id:
            job_query = (BugJobMap.failures.by_date(
                startday, endday).by_repo(repo).by_bug(bug_id).annotate(
                    date=TruncDate('job__push__time')).values('date').annotate(
                        failure_count=Count('id')).values(
                            'date', 'failure_count'))
        else:
            job_query = (Job.failures.filter(
                push__time__range=(startday, endday),
                failure_classification_id=4).by_repo(
                    repo, False).select_related('push').annotate(
                        date=TruncDate('push__time')).values('date').annotate(
                            failure_count=Count('id')).values(
                                'date', 'failure_count'))

        # merges the push_query and job_query results into a list; if a date is found in both queries,
        # update the job_query with the test_run count, if a date is in push_query but not job_query,
        # add a new object with push_query data and a default for failure_count
        self.queryset = []
        for push in push_query:
            match = [job for job in job_query if push['date'] == job['date']]
            if match:
                match[0]['test_runs'] = push['test_runs']
                self.queryset.append(match[0])
            else:
                self.queryset.append({
                    'date': push['date'],
                    'test_runs': push['test_runs'],
                    'failure_count': 0
                })

        serializer = self.get_serializer(self.queryset, many=True)
        return Response(serializer.data)
예제 #15
0
 def queryset(self, request, queryset):
     """Filter rows by the date of the lower bound of ``schedule``.

     The selected value maps to a day offset from today: 'n' -> today,
     'y' -> yesterday, 't' -> tomorrow. Any other value (including None)
     leaves the queryset unchanged.
     """
     # The three original branches duplicated the same annotate/filter and
     # differed only by a day offset, so use a lookup table instead.
     offsets = {'n': 0, 'y': -1, 't': 1}
     val = self.value()
     if val in offsets:
         return queryset.annotate(
             date=TruncDate(Cast(Lower('schedule'), dt_field))).filter(
                 date=localdate() + timedelta(days=offsets[val]))
     return queryset
예제 #16
0
    def update_daily_active_users_update(self, tracker, tracker_date_field,
                                         user_id, course, dau_obj, dau_type):
        """Record one user's time spent on the tracker's day.

        Creates or updates the DailyActiveUser row for (dau_obj, user,
        dau_type, course) with the user's total time taken that day, capped
        at ``self.MAX_TIME``. Silently returns for unknown user ids.
        """
        try:
            user_obj = User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return

        time_spent = Tracker.objects.annotate(
            day=TruncDate(tracker_date_field)) \
            .filter(day=tracker['day'], user=user_obj) \
            .aggregate(time=Sum('time_taken'))

        # Sum() yields None when no rows match; treat that as zero so the
        # comparison below cannot raise TypeError on None > int.
        # Cap at MAX_TIME so a user cannot record more seconds than exist
        # in a day.
        time_taken = time_spent['time'] or 0
        if time_taken > self.MAX_TIME:
            time_taken = self.MAX_TIME

        if time_taken != 0:
            dau, created = DailyActiveUser.objects.get_or_create(dau=dau_obj,
                                                                 user=user_obj,
                                                                 type=dau_type,
                                                                 course=course)
            dau.time_spent = time_taken
            dau.save()
            if created:
                self.stdout.write("added %s" % user_obj.username)
            else:
                self.stdout.write("updated %s" % user_obj.username)
예제 #17
0
    def get_queryset(self):
        """Daily issue counts, optionally narrowed by query parameters."""
        params = self.request.query_params

        # Count all the issues per day
        queryset = (Issue.objects.annotate(
            date=TruncDate("created")).values("date").annotate(
                total=Count("id")))

        # Optional equality filters: query parameter -> ORM lookup.
        optional_filters = (
            ("repository", "diff__revision__repository__slug"),
            ("analyzer", "analyzer"),
            ("check", "check"),
        )
        for param, lookup in optional_filters:
            value = params.get(param)
            if value:
                queryset = queryset.filter(**{lookup: value})

        # Optional lower bound on the day bucket.
        since = params.get("since")
        if since is not None:
            try:
                since = datetime.strptime(since, "%Y-%m-%d").date()
            except ValueError:
                raise APIException(
                    detail="invalid since date - should be YYYY-MM-DD")
            queryset = queryset.filter(date__gte=since)

        return queryset.order_by("date")
예제 #18
0
 def get_context_data(self, **kwards):
     """Add the signed-in user's tasks (latest deadline first) to the context."""
     context = super().get_context_data(**kwards)
     if self.request.user.is_authenticated:
         annotated = models.Task.objects.annotate(
             date=TruncDate('deadline_time'))
         context['task_list'] = annotated.order_by(
             '-deadline_time').filter(user=self.request.user)
     return context
예제 #19
0
    def list_filter(self, request, filter, *args, **kwargs):
        """List the requesting user's todos filtered by date or by status.

        :param filter: 'date' or 'status'; any other value returns all of
            the user's todos.
        """
        queryset = self.filter_queryset(self.get_queryset())

        if filter == 'date':
            if kwargs.get('date', ''):
                # NOTE(review): timezone.datetime(...) with a single string
                # argument raises TypeError — this branch looks like it should
                # parse the value instead; confirm against callers that pass
                # a 'date' kwarg.
                date = timezone.datetime(kwargs.get('date')).date()
            else:
                # No explicit date: default to today.
                date = timezone.now().date()

            queryset = queryset.annotate(
                todo_at=TruncDate('date_todo')).filter(user=request.user,
                                                       todo_at=date)

        elif filter == 'status':
            status_choice = kwargs.get('status', '')
            # Only accept statuses present in the display choices (the last
            # choice is deliberately excluded).
            if status_choice and status_choice.capitalize(
            ) in Todo.get_status_choices_display()[:-1]:
                queryset = queryset.filter(user=request.user,
                                           status=status_choice)
            else:
                queryset = queryset.filter(user=request.user)

        else:
            queryset = queryset.filter(user=request.user)

        return Response(TodoSerializer(queryset, many=True).data)
예제 #20
0
 def _trr_timeline(self):
     """Serialize the officer's TRRs annotated with unit and rank as of the
     TRR date."""
     queryset = self.officer.trr_set.all() \
         .annotate(trr_date=TruncDate('trr_datetime')) \
         .annotate(**self.unit_subqueries('trr_date')) \
         .annotate(**self.rank_subquery('trr_date'))
     return self.trr_new_timeline_serializer(queryset, many=True).data
예제 #21
0
    def to_representation(self, user):
        """Build a 24-hour forecast of upcoming reviews grouped by SRS level.

        Returns 24 lists (one per hour, starting at the current hour), each
        holding per-SRS-level review counts for that hour.
        """
        now = timezone.now()
        one_day_from_now = now + datetime.timedelta(hours=24)

        # Reviews due within the next 24h, bucketed by (date, hour) in UTC.
        reviews = get_users_reviews(user).filter(next_review_date__range=(now, one_day_from_now)) \
            .annotate(hour=TruncHour('next_review_date', tzinfo=timezone.utc)) \
            .annotate(date=TruncDate('next_review_date', tzinfo=timezone.utc)) \
            .values("streak", "date", "hour") \
            .annotate(review_count=Count('id')).order_by("date", "hour")
        expected_hour = now.hour
        # The next 24 wall-clock hours, wrapping past midnight.
        hours = [
            hour % 24 for hour in range(expected_hour, expected_hour + 24)
        ]

        retval = OrderedDict.fromkeys(hours)

        # Every hour starts with a zero count for every SRS level.
        for key in retval.keys():
            retval[key] = OrderedDict.fromkeys(
                [level.name for level in KwSrsLevel], 0)

        for review in reviews:
            found_hour = review['hour'].hour
            # Advance expected_hour (mod 24) until it matches this review's
            # hour; reviews arrive ordered by date then hour, so the cursor
            # only ever moves forward through the 24-hour window.
            while found_hour != expected_hour:
                expected_hour = (expected_hour + 1) % 24
            streak = review['streak']
            # Map the review's streak onto its SRS level bucket.
            srs_level = STREAK_TO_SRS_LEVEL_MAP_KW[streak].name
            retval[expected_hour][srs_level] += review["review_count"]

        # Drop the hour/level keys; the client only needs the count matrix.
        real_retval = [[count for srs_level, count in hourly_count.items()]
                       for hour, hourly_count in retval.items()]
        return real_retval
예제 #22
0
def water_chart(request):
    """Return Chart.js-style JSON of daily water consumption per building.

    ``labels`` is the ascending list of distinct reading dates; each dataset
    holds one building's per-date consumption values, appended in label order.
    """
    from collections import defaultdict

    queryset = (MeterReading.objects.annotate(
        date=TruncDate('date_time')).values('date').annotate(
            date_consumption=Sum('value')).values('meter__building__name',
                                                  'date_consumption', 'date'))

    # Group rows by date in a single pass instead of re-filtering the entire
    # queryset once per distinct date (was O(dates * readings)).
    readings_by_date = defaultdict(list)
    for reading in queryset:
        readings_by_date[reading['date']].append(reading)

    labels = []
    datasets_dict = {}
    for date in sorted(readings_by_date):
        labels.append(date)
        for reading in readings_by_date[date]:
            building_name = reading['meter__building__name']
            # setdefault preserves first-seen building order, matching the
            # original dict-insertion behavior.
            datasets_dict.setdefault(building_name, []).append(
                reading['date_consumption'])

    def create_random_colour():
        # rgba string with random r, g, b and alpha components in [0, 255].
        return f'rgba{tuple(np.random.randint(256, size=4))}'

    datasets = [{
        'label': building,
        # NOTE(review): Chart.js expects a boolean here; the string 'false'
        # is kept as-is to avoid changing the emitted payload.
        'fill': 'false',
        'backgroundColor': create_random_colour(),
        'data': data
    } for building, data in datasets_dict.items()]

    return JsonResponse(data={
        'labels': labels,
        'datasets': datasets,
    })
예제 #23
0
def dashboard(request):
    """Render the blog dashboard with aggregate article and readership statistics."""
    context = {
        "articles_count": Blog.objects.count(),
        "articles_by_day": (
            Blog.objects
            .annotate(created_day=TruncDate('created_at'))
            .values('created_day')
            .annotate(article_count=Count('created_day'))
            # Clear any default model ordering so the GROUP BY is exact.
            .order_by()
        ),
        "authors_count": Blog.objects.aggregate(Count('author', distinct=True)),
        "oldest_article": Blog.objects.aggregate(Min('created_at')),
        "read_number_stats": BlogCounts.objects.aggregate(
            Avg('read_number'), Max('read_number'),
            Min('read_number'), Sum('read_number')),
        "highest_read": Blog.objects.order_by('-blogcounts__read_number')[:3],
    }
    return render(request, 'dashboard.html', context)
예제 #24
0
def build_upcoming_srs_for_user(user):
    """Return a 24-element list of upcoming review counts, one per hour from now."""
    start, finish = get_24_hour_time_span()
    upcoming = get_users_reviews(user).filter(
        next_review_date__range=(start, finish)
    )

    upcoming = (
        upcoming.annotate(
            hour=TruncHour("next_review_date", tzinfo=timezone.utc)
        )
        .annotate(date=TruncDate("next_review_date", tzinfo=timezone.utc))
        .values("date", "hour")
        .annotate(review_count=Count("id"))
        .order_by("date", "hour")
    )

    logger.debug(f"Building upcoming SRS details for {user.username}")
    cursor = start.hour
    # One zero-initialized slot per upcoming wall-clock hour, wrapping at 24.
    counts = OrderedDict.fromkeys(
        [(cursor + offset) % 24 for offset in range(24)], 0)
    for row in upcoming:
        target = row["hour"].hour
        # Walk the hour cursor forward (mod 24) until it matches this row;
        # rows are ordered, so skipped hours keep their zero count.
        while target != cursor:
            logger.debug(f"{target} != {cursor}, skipping.")
            cursor = (cursor + 1) % 24
        counts[cursor] = row["review_count"]
        logger.debug(f"Inserting reviews at hour {cursor}")
    return list(counts.values())
예제 #25
0
def build_upcoming_srs_for_user(user):
    """Return a 24-slot list of the user's review counts over the coming day."""
    start, finish = get_24_hour_time_span()
    reviews = get_users_reviews(user).filter(next_review_date__range=(start, finish))

    # Best-effort trace of each upcoming review's due time.
    for review in reviews:
        logger.debug(review.next_review_date)

    grouped = (reviews
               .annotate(hour=TruncHour('next_review_date', tzinfo=timezone.utc))
               .annotate(date=TruncDate('next_review_date', tzinfo=timezone.utc))
               .values("date", "hour")
               .annotate(review_count=Count('id'))
               .order_by("date", "hour"))

    expected_hour = start.hour
    # One zero count per upcoming hour, keyed by wall-clock hour (mod 24).
    slots = OrderedDict(((start.hour + i) % 24, 0) for i in range(24))
    for entry in grouped:
        hour_of_entry = entry['hour'].hour
        # Rows are ordered, so advance the cursor until it matches; hours
        # without reviews simply keep their zero.
        while hour_of_entry != expected_hour:
            logger.debug("{} != {}, skipping.".format(hour_of_entry, expected_hour))
            expected_hour = (expected_hour + 1) % 24
        slots[expected_hour] = entry["review_count"]
        logger.debug("Inserting reviews at hour {}".format(expected_hour))

    return [count for count in slots.values()]
예제 #26
0
    def get_queryset(self, **kwargs):
        """Build a one-page summary of the shop's vendor bookings for one date.

        The ``page`` GET parameter (1-based) selects which distinct booking
        date to show. Returns a single-element list of
        ``{'date', 'sales', 'total'}`` dicts, or an empty dict when the shop
        has no bookings at all.
        """
        shop = ShopAssistant.objects.get(user=self.request.user).shop
        index = self.request.GET.get('page')
        if index is None:  # idiomatic None check (was `== None`)
            index = 1
        # NOTE(review): `_date` is annotated but `.values('date')` groups on
        # the raw field, so the TruncDate result is unused — confirm intent.
        booking_dates = VendorBooking.objects\
            .filter(vendor__shop=shop)\
            .annotate(_date=TruncDate('date'))\
                                   .values('date')\
                                   .annotate(Sum('amount_paid'))
        if not booking_dates:
            return {}

        self.request.session['page'] = index

        self.booking_dates = booking_dates
        # Pages are 1-based; each page corresponds to one distinct date.
        booking_date = booking_dates[int(index) - 1].get('date')
        total = booking_dates[int(index) - 1].get('amount_paid__sum')
        objects = VendorBooking.objects\
            .filter(date=booking_date, vendor__shop=shop)\
            .prefetch_related('productbooking_set',
                              'productbooking_set__product')\
            .select_related('vendor')

        sales_dict = {
            'date': booking_date,
            'sales': objects,
            'total': total,
        }
        return [sales_dict]
예제 #27
0
파일: users.py 프로젝트: garinm90/ephios
    def get_workhour_items(self):
        """Merge confirmed shift participations with manual working hours.

        Returns a tuple ``(total_hours, items)`` where ``items`` is a
        date-sorted list of ``{'hours', 'date', 'reason'}`` dicts drawn from
        both sources.
        """
        from ephios.core.models import AbstractParticipation

        confirmed = self.localparticipation_set.filter(
            state=AbstractParticipation.States.CONFIRMED)
        participation = confirmed.annotate(
            hours=ExpressionWrapper(
                (F("shift__end_time") - F("shift__start_time"))
                / 1000000   # shift length arrives in microseconds
                / 3600,     # seconds -> hours
                output_field=models.DecimalField(),
            ),
            date=ExpressionWrapper(
                TruncDate(F("shift__start_time")),
                output_field=DateField(),
            ),
            reason=F("shift__event__title"),
        ).values("hours", "date", "reason")

        workinghours = self.workinghours_set.all().values(
            "hours", "date", "reason")

        # Either aggregate may be None on an empty queryset; treat as zero.
        total = (participation.aggregate(Sum("hours"))["hours__sum"] or 0) + \
                (workinghours.aggregate(Sum("hours"))["hours__sum"] or 0)
        merged = sorted(chain(participation, workinghours),
                        key=lambda item: item["date"])
        return total, merged
예제 #28
0
def restaurant_dashboard(request):
    """Render the restaurant sales dashboard, grouped by month or by date.

    The ``order_by`` GET parameter selects the grouping. Any value outside
    {'month', 'date'} now falls back to 'date' — previously such a value left
    ``sales``/``labels`` unassigned and raised UnboundLocalError.
    """
    allowed_categories = ['month', 'date']
    order_category = request.GET.get('order_by', 'date')
    if order_category not in allowed_categories:
        order_category = 'date'

    # Completed orders for this user's restaurant, shared by both groupings.
    completed = Order.objects.filter(complete=True) \
        .filter(items__food__restaurant=request.user.restaurant)

    if order_category == 'month':
        sales = completed \
            .annotate(month=TruncMonth('created')) \
            .values('month') \
            .annotate(count=Count('id')) \
            .order_by()
        labels = [sale['month'].strftime('%b %Y') for sale in sales]
    else:
        sales = completed \
            .annotate(date=TruncDate('created')) \
            .values('date') \
            .annotate(count=Count('id')) \
            .order_by()
        labels = [sale['date'].strftime('%d %b %Y') for sale in sales]

    data = [sale['count'] for sale in sales]

    context = {
        'restaurant': request.user.restaurant,
        'section': 'dashboard',
        'labels': labels,
        'data': data,
        'order_parameter': order_category
    }
    return render(request, 'restaurants/dashboard.html', context)
예제 #29
0
파일: checkin.py 프로젝트: astrocbxy/pretix
 def entries_days(self):
     """Count the distinct event-local days on which this position has entry check-ins."""
     tz = self._clist.event.timezone
     with override(tz):
         entries = self._position.checkins.filter(
             list=self._clist, type=Checkin.TYPE_ENTRY)
         # Truncate in the event's timezone so a day boundary is local, not UTC.
         per_day = entries.annotate(day=TruncDate('datetime', tzinfo=tz))
         return per_day.values('day').distinct().count()
예제 #30
0
def get_date_count(date=None, user=1):
    """Count reviews recorded on ``date`` (defaults to today, local time) for ``user``."""
    if not date:
        date = timezone.localtime(timezone.now()).date()
    reviews = functions.get_all_review(user=functions.get_user(user))
    return reviews.annotate(date=TruncDate('review_time')).filter(date=date).count()