def process_hourly_activity(user, start, end, modelclass):
    if modelclass == AdRecord:
        qs = modelclass.objects.filter(_user_id=user.id, date__range=(start, end))
    else:
        qs = modelclass.objects.filter(user=user, date__range=(start, end))
    if not qs:
        return {}
    qss = QuerySetStats(qs, 'date')
    start_date = start.date()
    end_date = end.date()
    if start_date == end_date:
        end_date = start_date + timedelta(days=1)
    timeseries = qss.time_series(
                        start_date, end=end_date, interval='hours', date_field='date')
    activities = {}
    for t in timeseries:
        if t[1]:
            d = t[0]
            activity = get_hourly_activity(user, start_date, d.hour)
            if modelclass == AdRecord:
                for r in qs.filter(date__hour=d.hour):
                    activity.adrecords.add(r)
            elif modelclass == FBSponsored:
                for r in qs.filter(date__hour=d.hour):
                    activity.fbsponsored.add(r)
            elif modelclass == FBAd:
                for r in qs.filter(date__hour=d.hour):
                    activity.fbads.add(r)
            activity.adtotal = activity.adrecords.count()
            activity.fbadtotal = activity.fbads.count()
            activity.fbsptotal = activity.fbsponsored.count()
            activity.save()
            activities[d] = activity    
    return activities
Example #2
def get_scan_per_day(start_date, end_date):
    scan_qsstats = QuerySetStats(
        Scan.objects.all(),
        date_field='start',
    )
    scan_values = scan_qsstats.time_series(start_date,
                                           end_date,
                                           interval='days')
    return format_date(scan_values)
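For orientation: `time_series` returns one `(datetime, value)` pair per step of the requested interval. Below is a minimal sketch of consuming that result directly, reusing the `Scan` model and its `start` field from the example above (both are assumptions about that project):

# Minimal sketch; Scan and its 'start' date field are borrowed from the example above.
import datetime
from qsstats import QuerySetStats

start_date = datetime.date(2020, 1, 1)
end_date = datetime.date(2020, 1, 7)

qss = QuerySetStats(Scan.objects.all(), date_field='start')
for day, count in qss.time_series(start_date, end_date, interval='days'):
    # each item is (bucket datetime, aggregated value for that bucket)
    print(day.strftime('%Y-%m-%d'), count)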
Example #3
    def test_time_series_weeks(self):
        day = datetime.date(year=2013, month=4, day=5)

        u = User.objects.create_user('user', '*****@*****.**')
        u.date_joined = day
        u.save()

        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')
        qss.time_series(day - datetime.timedelta(days=30), day, interval='weeks')
Example #5
def test(request):
    """Just test page."""
    template_name = 'runstat/test.html'
    #
    start_date = datetime.strptime('2016-05-01', '%Y-%m-%d').date()
    end_date = datetime.strptime('2016-05-31', '%Y-%m-%d').date()
    qs = GroupPost.objects.all()
    qss = QuerySetStats(qs, date_field='created_time')
    values = qss.time_series(start_date, end_date, interval='days')

    return render(request, template_name, {'values': values})
Example #6
 def assertTimeSeriesWorks(self, today):
     seven_days_ago = today - datetime.timedelta(days=7)
     for j in range(1,8):
         for i in range(0,j):
             u = User.objects.create_user('p-%s-%s' % (j, i), '*****@*****.**' % (j, i))
             u.date_joined = today - datetime.timedelta(days=i)
             u.save()
     qs = User.objects.all()
     qss = QuerySetStats(qs, 'date_joined')
     time_series = qss.time_series(seven_days_ago, today)
     self.assertEqual([t[1] for t in time_series], [0, 1, 2, 3, 4, 5, 6, 7])
Example #8
    def test_basic_today(self):
        # We'll be making sure that this user is found
        u1 = User.objects.create_user('u1', '*****@*****.**')
        # And that this user is not
        u2 = User.objects.create_user('u2', '*****@*****.**')
        u2.is_active = False
        u2.save()

        # Create a QuerySet and QuerySetStats
        qs = User.objects.filter(is_active=True)
        qss = QuerySetStats(qs, 'date_joined')
        
        # We should only see a single user
        self.assertEqual(qss.this_day(), 1)
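The `this_day()` shortcut used above has siblings that show up later in this listing (`this_month`, `this_year`, `until_now`; see Examples #28, #29 and #36 below). A hedged sketch of the same queryset run through them:

# Sketch only; the shortcut names below also appear further down in this listing.
from django.contrib.auth.models import User
from qsstats import QuerySetStats

qss = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
qss.this_day()    # aggregate for today
qss.this_month()  # aggregate for the current month
qss.this_year()   # aggregate for the current year
qss.until_now()   # aggregate for everything up to now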
Example #9
    def get_models_data(self, querysets):
        """ querysets = {model: (queryset, time_field)} """
        for model_type, (queryset, time_field) in querysets.iteritems():
            qss = QuerySetStats(queryset, time_field)

            data = qss.time_series(self.startdate, self.enddate)

            counts = [d[1] for d in data]
            qss.last_week = sum(counts[-7:])
            qss.last_month = sum(counts[-30:])

            self.day_captions = [t[0].day for t in data]

            setattr(self, model_type+'_qss', qss)
            setattr(self, model_type+'_values', counts)
Example #10
def graph(request):
    start = datetime.datetime(2013, 11, 21, 00, 00).date()
    end = datetime.datetime.now().date()
    users = User.objects.all()
    qsstats = QuerySetStats(users, date_field='last_login', aggregate=Count('id'))
    values = qsstats.time_series(start, end, interval='days')

    staff = User.objects.all().filter(is_staff='1').count()
    other = User.objects.all().count() - staff
    type_of_user = [['Superusers', staff], ['Users', other]]
    start = datetime.datetime(2013, 12, 3, 00, 00).date()
    answer = Answer.objects.all()
    qsstats2 = QuerySetStats(answer, date_field='date', aggregate=Count('id'))
    ans = qsstats2.time_series(start,end, interval="days")
    return render(request, 'graph.html', {'ans': ans, 'users': values, 'type': type_of_user})
Example #11
def graph2(request):
    today = datetime.date.today()
    start_date= today - datetime.timedelta(days=30)
    end_date = today - datetime.timedelta(days=10)

    available_orders=Order.objects.all()
    qsstats = QuerySetStats(available_orders, date_field='date_to_ship', aggregate=Count('id'))
    order_stats = qsstats.time_series(start_date, end_date, interval='days')

    available_incomings=Incoming.objects.all()
    qsstats = QuerySetStats(available_incomings, date_field='date_to_ship', aggregate=Count('id'))
    incoming_stats = qsstats.time_series(start_date, end_date, interval='days')

    orders_by_holders = Order.objects.values("client_name").annotate(Count("id"))

    return render_to_response('wms/graph2.html', {'order_stats':order_stats,'incoming_stats':incoming_stats, 'orders_by_holders':orders_by_holders}, context_instance=RequestContext(request))
Example #12
    def graph_users_properties(self):
        print 'Graph new properties from users -',
        attrs = {}
        attrs['legend'] = {1: u'Private owner', 2: u'Agency'}
        values = []
        for key in attrs['legend']:
            if key == 1:
                users = User.objects.filter(agencies__isnull=True)
            else:
                users = User.objects.filter(agencies__isnull=False)

            qs = Ad.objects.filter(user__in=users).only('created')
            end = datetime.today()
            start = end - timedelta(days=30)
            # prepare the chart data
            data = [] if not qs else QuerySetStats(qs, 'created').time_series(
                start, end)
            values.append([t[1] for t in data])
        try:
            attrs['max_value'] = max([
                values[0][key] + values[1][key]
                for key in range(0, len(values[0]))
            ])
        except:
            pass
        attrs['captions'] = [t[0].day for t in data]
        attrs['values'] = values
        cache.set('graph_users_properties', attrs, 60 * 60 * 24 * 7)
        print 'OK'
Example #13
    def test_invalid_interval(self):
        qss = QuerySetStats(User.objects.all(), 'date_joined')

        def _invalid():
            qss.time_series(qss.today, qss.today, interval='monkeys')

        self.assertRaises(InvalidInterval, _invalid)
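For contrast with the failing case above: the interval names actually used across these examples are 'minutes', 'hours', 'days', 'weeks', 'months' and 'years'. A hedged sketch of the happy path next to the failing one (the `InvalidInterval` import path is an assumption and may differ between qsstats versions):

# Sketch; the exception import path below is assumed, not confirmed by the example.
import datetime
from django.contrib.auth.models import User
from qsstats import QuerySetStats
from qsstats.exceptions import InvalidInterval

qss = QuerySetStats(User.objects.all(), 'date_joined')
week_ago = qss.today - datetime.timedelta(days=7)
qss.time_series(week_ago, qss.today, interval='weeks')  # any of the six names above is accepted
try:
    qss.time_series(week_ago, qss.today, interval='monkeys')
except InvalidInterval:
    pass  # anything else raises InvalidInterval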
Example #14
    def graph_new_user(self):
        print 'Graph new users -',
        attrs = {}
        attrs['legend'] = {1: u'Private owner', 2: u'Agency'}
        values = []
        for key in attrs['legend']:
            if key == 1:
                qs = User.objects.filter(
                    is_active=1, agencies__isnull=True).only('date_joined')
            else:
                qs = User.objects.filter(
                    is_active=1, agencies__isnull=False).only('date_joined')

            end = datetime.today()
            start = end - timedelta(days=30)

            # prepare the chart data
            data = QuerySetStats(qs, 'date_joined').time_series(start, end)
            values.append([t[1] for t in data])
        attrs['max_value'] = max([
            values[0][key] + values[1][key]
            for key in range(0, len(values[0]))
        ])
        attrs['captions'] = [t[0].day for t in data]
        attrs['values'] = values
        cache.set('graph_users', attrs, 60 * 60 * 24 * 7)
        print 'OK'
Example #15
    def test_after(self):
        now = compat.now()
        today = _remove_time(now)
        tomorrow = today + datetime.timedelta(days=1)

        u = User.objects.create_user('u', '*****@*****.**')
        u.date_joined = today
        u.save()

        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')

        self.assertEqual(qss.after(today)[0], 1)
        self.assertEqual(qss.after(now)[0], 0)
        u.date_joined = tomorrow
        u.save()
        self.assertEqual(qss.after(now)[0], 1)
Example #16
    def get_registrations(self, interval, days, graph_key, select_box_value):
        """ Returns an array with new users count per interval."""
        try:
            conf_data = DashboardStats.objects.get(graph_key=graph_key)
            model_name = apps.get_model(conf_data.model_app_name,
                                        conf_data.model_name)
            kwargs = {}
            for i in conf_data.criteria.all():
                # fixed mapping value passed into kwargs
                if i.criteria_fix_mapping:
                    for key in i.criteria_fix_mapping:
                        # value => i.criteria_fix_mapping[key]
                        kwargs[key] = i.criteria_fix_mapping[key]

                # dynamic mapping value passed into kwargs
                if i.dynamic_criteria_field_name and select_box_value:
                    kwargs[i.dynamic_criteria_field_name] = select_box_value

            aggregate = None
            if conf_data.type_operation_field_name and conf_data.operation_field_name:
                operation = {
                    'Count': Count(conf_data.operation_field_name),
                    'Sum': Sum(conf_data.operation_field_name),
                    'Avg': Avg(conf_data.operation_field_name),
                    'StdDev': StdDev(conf_data.operation_field_name),
                    'Max': Max(conf_data.operation_field_name),
                    'Min': Min(conf_data.operation_field_name),
                    'Variance': Variance(conf_data.operation_field_name),
                }
                aggregate = operation[conf_data.type_operation_field_name]

            stats = QuerySetStats(model_name.objects.filter(**kwargs),
                                  conf_data.date_field_name, aggregate)
            # stats = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1),
                                         interval)

            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1),
                                     interval)
        except (LookupError, FieldError, TypeError) as e:
            self.error_message = str(e)
            User = get_user_model()
            stats = QuerySetStats(User.objects.filter(is_active=True),
                                  'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1),
                                         interval)
            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1),
                                     interval)
Example #17
    def test_after(self):
        today = datetime.date.today()
        tomorrow = today + datetime.timedelta(days=1)
        now = datetime.datetime.now()

        u = User.objects.create_user('u', '*****@*****.**')
        u.date_joined = today
        u.save()
        
        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')

        self.assertEqual(qss.after(today), 1)
        self.assertEqual(qss.after(now), 0)
        u.date_joined=tomorrow
        u.save()
        self.assertEqual(qss.after(now), 1)
Example #18
def graph(request):
    available_clients=Client.objects.select_related('client').filter(referredclients__user=request.user.id)
    available_orders=Order.objects.select_related('client').filter(holder=available_clients)
    qsstats = QuerySetStats(available_orders, date_field='date_to_ship', aggregate=Count('id'))
    today = datetime.date.today()
    thirty_days_ago = today - datetime.timedelta(days=30)
    order_stats = qsstats.time_series(thirty_days_ago, today, interval='days')
    return render_to_response('wms/graph.html', {'order_stats':order_stats}, context_instance=RequestContext(request))
Example #19
 def timeseries_by_day(self, entity_type, days_ago=7):
     now = datetime.date.today()
     beg_day = now - datetime.timedelta(days=days_ago)
     qs = super(HistoryManager, self).filter(entity_type=entity_type)
     ts = QuerySetStats(qs, 'timestamp').time_series(beg_day, now)
     ret = []
     for dt, count in ts:
         ret.append({'time':dt.isoformat(), 'count': count})
     return ret
Example #20
 def graph_feeds_properties(self):
     print 'Graph new properties from feeds -',
     attrs = {}
     qs = Ad.objects.filter(user__isnull=1, status__lt=200).only('created')
     end = datetime.today()
     start = end - timedelta(days=30)
     data = QuerySetStats(qs, 'created').time_series(start, end)
     attrs['values'] = [t[1] for t in data]
     attrs['captions'] = [t[0].day for t in data]
     cache.set('graph_feeds_properties', attrs, 60 * 60 * 24 * 7)
     print 'OK'
Example #21
File: views.py Project: caiwan/wimm
    def list(self, request):
        date_from = parse(request.GET['from']).date()
        date_to = parse(request.GET['to']).date()
        interval = request.GET['interval']

        assert interval in {'years', 'months', 'weeks', 'days'}

        qs = Item.objects.all()
        qss = QuerySetStats(qs, date_field='date', aggregate=Sum('price')). \
                time_series(date_from, date_to, interval=interval)

        return Response((_[0].date(), _[1]) for _ in qss)
Example #22
def time_series(queryset, date_field, interval, func=None, agg=None, annotate_field=None):
    if agg in (None, "daily"):
        qsstats = QuerySetStats(queryset, date_field, func)
        return qsstats.time_series(*interval)
    else:
        # Custom aggregation was set (weekly/monthly/yearly)
        agg_by = agg[:-2]

        # We need to set the range dynamically
        interval_filter = {date_field + "__gte": interval[0], date_field + "__lte": interval[1]}

        # Slightly raw-ish SQL query using extra

        truncate_date = connections[queryset.db].ops.date_trunc_sql(agg_by, date_field)
        if not annotate_field:
            # By default we always annotate by dates, counting all the
            # records recovered for the given dates
            annotate_field = date_field
            result = (
                queryset.extra(select={agg_by: truncate_date})
                .values_list(agg_by)
                .annotate(Count(annotate_field))
                .filter(**interval_filter)
                .order_by(agg_by)
            )
        else:
            # We do a Sum by annotation field, we are assuming if the
            # annotation is not by dates it will be a field which type can be
            # added, like prices for example.
            result = (
                queryset.extra(select={agg_by: truncate_date})
                .values_list(agg_by)
                .annotate(Sum(annotate_field))
                .filter(**interval_filter)
                .order_by(agg_by)
            )

        return fill_missing_dates_with_zeros(result, agg_by, interval)
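On recent Django versions the same weekly/monthly/yearly grouping is usually written with `Trunc` annotations instead of the backend-level `date_trunc_sql` plus `.extra()` used above. A hedged sketch of a rough equivalent (not a drop-in replacement; the `fill_missing_dates_with_zeros` helper from the example above would still be needed to zero-fill gaps):

# Hedged sketch of a Trunc-based equivalent (Django 1.10+); names mirror the example above.
from django.db.models import Count, Sum
from django.db.models.functions import Trunc

def time_series_trunc(queryset, date_field, interval, agg_by='week', annotate_field=None):
    # interval is a (start, end) pair, as in the function above
    interval_filter = {date_field + "__gte": interval[0], date_field + "__lte": interval[1]}
    aggregate = Sum(annotate_field) if annotate_field else Count(date_field)
    return (
        queryset.filter(**interval_filter)
        .annotate(period=Trunc(date_field, agg_by))  # agg_by: 'week', 'month' or 'year'
        .values_list('period')
        .annotate(aggregate)
        .order_by('period')
    )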
Example #23
 def changelist_view(self, request, **kwargs):
     from qsstats import QuerySetStats
     from django.db.models import Count, Min
     from django.utils import timezone
     today = timezone.now()
     start_day = Visits.objects.all().aggregate(Min('datetime')).values()
     #print(start_day[0])
     #print(today)
     qsstats = QuerySetStats(Visits.objects.all(),
                             date_field='datetime',
                             aggregate=Count('id'))
     values = qsstats.time_series(start_day[0], today, interval='days')
     args = {
         'values': values,
         'opts': Visits._meta,
         'app_label': Visits._meta.app_label,
         'site_header': 'Welcome page visits statistic',
         'site_title': "Statistic"
     }
     from django.shortcuts import render_to_response
     from django.template import RequestContext
     return render_to_response('admin/app/visits/change_list.html',
                               args,
                               context_instance=RequestContext(request))
Example #24
    def get(self, request, format=None):
        """
        return data for graph
        """
        try:

            obj_id = request.query_params.get('equipment', 0)
            end_date = request.query_params.get('end_date', timezone.now())
            start_date = request.query_params.get('start_date')

            equip = Equipment.objects.filter(id=obj_id).first()
            if isinstance(end_date, str):
                end_date = parse_datetime(end_date) or parse_date(end_date)
            if start_date is None:
                start_date = end_date - datetime.timedelta(days=1)

            qs = RawData.objects.filter(
                mac_address=equip.xbee_mac,
                channel=equip.main_channel).order_by('date')
            qss = QuerySetStats(qs=qs,
                                date_field='date',
                                aggregate=Avg('value'))
            time_series = qss.time_series(start_date,
                                          end_date,
                                          interval='minutes')

            data = {
                'equipment': str(equip),
                'end_date': end_date,
                'ts': time_series
            }

        except Exception as e:
            raise Http404('Error in parameters')

        return Response(data)
Example #25
def moderation_stats(request):
    from qsstats import QuerySetStats
    today = datetime.datetime.today()
    date_list = [today - datetime.timedelta(days=x) for x in range(0, 40)]
    start = date_list[-1]

    moderators = dict(set(Moderation.objects.filter(end_time__gt=start, moderator__isnull=False).values_list('moderator__email', 'moderator_id')))
    for moderator_email, moderator_id in moderators.items():
        # the start_time__lt=F('end_time') check filters out moderations without submissions (when a moderator simply rejects a published listing that was never changed)
        qs = Moderation.objects.filter(moderator_id=moderator_id, start_time__lt=F('end_time')).only('end_time')
        data = QuerySetStats(qs, 'end_time').time_series(start, today)
        if max([x[1] for x in data]) > 15:
            moderators[moderator_email] = data
        else:
            del moderators[moderator_email]

    return render(request, 'admin/moderation_stats.jinja.html', locals())
Example #26
    def get_registrations(self, interval, days, graph_key, select_box_value):
        """ Returns an array with new users count per interval."""
        try:
            conf_data = DashboardStats.objects.get(graph_key=graph_key)
            model_name = get_model(conf_data.model_app_name,
                                   conf_data.model_name)
            kwargs = {}

            for i in conf_data.criteria.all():
                # fixed mapping value passed into kwargs
                if i.criteria_fix_mapping:
                    for key in i.criteria_fix_mapping:
                        # value => i.criteria_fix_mapping[key]
                        kwargs[key] = i.criteria_fix_mapping[key]

                # dynamic mapping value passed into kwargs
                if i.dynamic_criteria_field_name and select_box_value:
                    kwargs[i.dynamic_criteria_field_name] = select_box_value

            stats = QuerySetStats(model_name.objects.filter(**kwargs),
                                  conf_data.date_field_name)
            #stats = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1),
                                         interval)

            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1),
                                     interval)
        except:
            User = get_user_model()
            stats = QuerySetStats(User.objects.filter(is_active=True),
                                  'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1),
                                         interval)
            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1),
                                     interval)
Example #27
    def get_registrations(self, interval, days, graph_key, select_box_value):
        """ Returns an array with new users count per interval."""
        try:
            conf_data = DashboardStats.objects.get(graph_key=graph_key)
            model_name = get_model(conf_data.model_app_name, conf_data.model_name)
            kwargs = {}
            for i in conf_data.criteria.all():
                # fixed mapping value passed into kwargs
                if i.criteria_fix_mapping:
                    for key in i.criteria_fix_mapping:
                       # value => i.criteria_fix_mapping[key]
                        kwargs[key] = i.criteria_fix_mapping[key]

                # dynamic mapping value passed into kwargs
                if i.dynamic_criteria_field_name and select_box_value:
                    kwargs[i.dynamic_criteria_field_name] = select_box_value

            aggregate = None
            if conf_data.type_operation_field_name and conf_data.operation_field_name:
                operation = {
                    'Sum': Sum(conf_data.operation_field_name),
                    'Avg': Avg(conf_data.operation_field_name),
                    'StdDev': StdDev(conf_data.operation_field_name),
                    'Max': Max(conf_data.operation_field_name),
                    'Min': Min(conf_data.operation_field_name),
                    'Variance': Variance(conf_data.operation_field_name),
                }
                aggregate = operation[conf_data.type_operation_field_name]

            stats = QuerySetStats(model_name.objects.filter(**kwargs),
                                      conf_data.date_field_name, aggregate)
            #stats = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1), interval)

            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1), interval)
        except:
            User = get_user_model()
            stats = QuerySetStats(
                User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1), interval)
            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1), interval)
Example #28
    def test_until(self):
        today = datetime.date.today()
        yesterday = today - datetime.timedelta(days=1)
        now = datetime.datetime.now()

        u = User.objects.create_user('u', '*****@*****.**')
        u.date_joined = today
        u.save()
        
        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')

        self.assertEqual(qss.until(now), 1)
        self.assertEqual(qss.until(today), 1)
        self.assertEqual(qss.until(yesterday), 0)
        self.assertEqual(qss.until_now(), 1)
Example #29
    def test_until(self):
        now = compat.now()
        today = _remove_time(now)
        yesterday = today - datetime.timedelta(days=1)

        u = User.objects.create_user('u', '*****@*****.**')
        u.date_joined = today
        u.save()

        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')

        self.assertEqual(qss.until(now)[0], 1)
        self.assertEqual(qss.until(today)[0], 1)
        self.assertEqual(qss.until(yesterday)[0], 0)
        self.assertEqual(qss.until_now()[0], 1)
Example #30
def detail(request, user_id):
    try:
        user = User.objects.get(id=user_id)
        statistic = Statistic.objects.filter(user=user_id)

        if 'start' in request.GET and request.GET[
                'start'] and 'end' in request.GET and request.GET['end']:
            start_date = datetime.strptime(request.GET['start'],
                                           '%Y-%m-%d').date()
            end_date = datetime.strptime(request.GET['end'], '%Y-%m-%d').date()
        else:
            end_date = Statistic.get_end_date(user_id=user_id)
            start_date = end_date - timedelta(days=6)

        qsstats_page_views = QuerySetStats(statistic,
                                           date_field='date',
                                           aggregate=Sum('page_views'))
        charts_page_views = qsstats_page_views.time_series(start_date,
                                                           end_date,
                                                           interval='days')

        qsstats_clicks = QuerySetStats(statistic,
                                       date_field='date',
                                       aggregate=Sum('clicks'))
        charts_clicks = qsstats_clicks.time_series(start_date,
                                                   end_date,
                                                   interval='days')

        context = {
            'user': user,
            'statistic': statistic,
            'start_date': start_date,
            'end_date': end_date,
            'charts_page_views': charts_page_views,
            'charts_clicks': charts_clicks
        }
    except:
        raise Http404('User is not found.')
    return render(request, 'users/detail.html', context=context)
Example #31
    def get_registrations(self, interval, days, graph_key, select_box_value):
        """ Returns an array with new users count per interval."""
        try:
            conf_data = DashboardStats.objects.get(graph_key=graph_key)
            model_name = get_model(
                conf_data.model_app_name, conf_data.model_name)
            kwargs = {}

            for i in conf_data.criteria.all():
                # fixed mapping value passed into kwargs
                if i.criteria_fix_mapping:
                    for key in i.criteria_fix_mapping:
                        # value => i.criteria_fix_mapping[key]
                        kwargs[key] = i.criteria_fix_mapping[key]

                # dynamic mapping value passed into kwargs
                if i.dynamic_criteria_field_name and select_box_value:
                    kwargs[i.dynamic_criteria_field_name] = select_box_value

            stats = QuerySetStats(model_name.objects.filter(**kwargs),
                                  conf_data.date_field_name)
            #stats = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1), interval)

            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1), interval)
        except:
            stats = QuerySetStats(
                User.objects.filter(is_active=True), 'date_joined')
            today = now()
            if days == 24:
                begin = today - timedelta(hours=days - 1)
                return stats.time_series(begin, today + timedelta(hours=1), interval)
            begin = today - timedelta(days=days - 1)
            return stats.time_series(begin, today + timedelta(days=1), interval)
Example #32
 def get_registrations(self, interval, days):
     """ Returns an array with new users count per interval """
     stats = QuerySetStats(User.objects.filter(is_active=True), 'date_joined')
     today = datetime.today()
     begin = today - timedelta(days=days-1)
     return stats.time_series(begin, today+timedelta(days=1), interval)
Example #33
 def response_timeseries(self):
     """
     Returns a list of timeseries from the responses.
     """
     qss = QuerySetStats(self.all(), "created")
     return qss.time_series(timezone.now() - timedelta(days=7), timezone.now())
Example #34
 def get_data(self, interval, days):
     """ Returns an array with new users count per interval """
     stats = QuerySetStats(self.queryset, self.date_field)
     today = datetime.date.today()
     begin = today - datetime.timedelta(days=days-1)
     return stats.time_series(begin, today, interval)
Example #35
def update_intervals():
    """
    periodic function to transform RawData to GraphicsData and ClassifiedIntervals
    :return:
    """
    # Check that ClassifiedInterval is not locked
    lock = Semaphore.objects.filter(name=ClassifiedInterval.__name__).first()
    if lock and lock.is_locked:
        # TODO - check if locked period is not too long
        print('Locked!!!')
        return  # Locked - nothing to do

    available_reasons = Reason.objects.filter(
        code__in=['000', '001', '002']).order_by('code')
    for eq in Equipment.objects.all():
        try:
            last_date = ClassifiedInterval.objects.filter(
                equipment_id__exact=eq.id).order_by('-end').first().end
        except Exception:
            try:
                last_date = RawData.objects.filter(
                    mac_address=eq.xbee_mac).order_by('date').first().date
            except Exception:
                continue  # No data at all

        # print(f'eq = {eq}; last_date = {last_date}')

        qs = RawData.objects.filter(mac_address=eq.xbee_mac,
                                    channel=eq.main_channel,
                                    date__gte=last_date)

        ts = QuerySetStats(qs, date_field='date',
                           aggregate=Avg('value')).time_series(
                               start=last_date,
                               end=timezone.now(),
                               interval='minutes')
        # print(ts[0][0], ts[-1][0])
        prev_reason = None
        start = ts[0][0]
        for t in ts:
            if t[1] >= eq.no_load_threshold:
                cur_reason = available_reasons[0]
            else:
                cur_reason = available_reasons[1]

            # Do not forget to apply timetables

            if prev_reason is not None and (cur_reason.id != prev_reason.id
                                            or t[0] == ts[-1][0]):
                # print('adding interval {0} {1} {2}'.format(start, t[0], cur_reason))
                try:
                    ClassifiedInterval.add_interval(start=start,
                                                    end=t[0],
                                                    equipment=eq,
                                                    classification=prev_reason)
                except Exception as e:
                    exc_type, exc_obj, exc_tb = sys.exc_info()
                    fname = os.path.split(
                        exc_tb.tb_frame.f_code.co_filename)[1]
                    #logger.error('{0}, {1}, {2}\n{3}'.format(exc_type, fname,
                    #                                                        exc_tb.tb_lineno, e))
                    print('{0}, {1}, {2}\n{3}'.format(exc_type, fname,
                                                      exc_tb.tb_lineno, e))
                    return
                prev_reason = cur_reason
                start = t[0]
            else:
                prev_reason = cur_reason

        # clear data in RawData between last_date and date_from
        last_gd = GraphicsData.objects.filter(
            equipment=eq).order_by('-date').first()
        if not last_gd:
            # No GraphicsData at all
            date_from = RawData.objects.filter(mac_address=eq.xbee_mac, channel=eq.main_channel) \
                                        .order_by('date').first().date
        else:
            date_from = last_gd.date if last_gd else last_date
            date_from += timedelta(minutes=1)
        if date_from < last_date:
            qs = RawData.objects.filter(
                mac_address=eq.xbee_mac,
                channel=eq.main_channel,
                date__gte=date_from
            )  # Don't need to add last_date - it's not an error!
            # print(date_from, last_date)
            # print(last_date)
            ts = QuerySetStats(qs, date_field='date',
                               aggregate=Avg('value')).time_series(
                                   start=date_from,
                                   end=last_date,
                                   interval='minutes')
            # print(ts)
            with transaction.atomic():
                # write all grouped RawData objects into GraphicsData and delete RawData
                GraphicsData.objects.bulk_create([
                    GraphicsData(equipment=eq, date=t[0], value=t[1])
                    for t in ts
                ])
                # clear RawData
                RawData.objects.filter(mac_address=eq.xbee_mac,
                                       date__gte=date_from,
                                       date__lte=last_date +
                                       timedelta(minutes=1)).delete()
        else:
            print('Nothing to update')
Example #36
 def test_date_field_missing(self):
     qss = QuerySetStats(User.objects.all())
     for method in ['this_day', 'this_month', 'this_year']:
         self.assertRaises(DateFieldMissing, getattr(qss, method))
Example #37
def time_series(queryset, date_field, interval, func=None):
    qsstats = QuerySetStats(queryset, date_field, func)
    return qsstats.time_series(*interval)
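A hedged usage sketch for the thin wrapper above: because `*interval` is unpacked straight into `time_series`, the `interval` argument is expected to be a `(start, end)` tuple, optionally followed by an interval string. The `User` queryset below is only an illustration:

# Assumed usage, based on the *interval unpacking in the wrapper above.
import datetime
from django.contrib.auth.models import User
from django.db.models import Count

today = datetime.date.today()
week_ago = today - datetime.timedelta(days=7)
series = time_series(User.objects.all(), 'date_joined', (week_ago, today, 'days'), func=Count('id'))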
Example #38
def get_scan_per_day(start_date, end_date):
    scan_qsstats = QuerySetStats(Scan.objects.all(),
                                 date_field='start',)
    scan_values = scan_qsstats.time_series(start_date, end_date, interval='days')
    return format_date(scan_values)
Example #39
def rebuild_intervals(equipment=None, start=None, end=None):
    """
    use this function to rebuild intervals after changing levels or finding errors
    :param equipment: model.Equipment object or pk
    :param start: start datetime, if None - from the very beginning
    :param end: end datetime, if None - until now
    :return: None
    """
    try:
        # First of all lock ClassifiedInterval update
        semaphores = Semaphore.objects.filter(name=ClassifiedInterval.__name__)
        if semaphores:
            lock = semaphores.first()
            lock.lock_when = timezone.now()
            lock.is_locked = True
        else:
            lock = Semaphore(name=ClassifiedInterval.__name__, is_locked=True)
        lock.save()

        # Next define set of ClassifiedIntervals to rebuild
        if equipment is None:
            equipment_set = Equipment.objects.all()
        elif isinstance(equipment, Equipment):
            equipment_set = [equipment]
        elif isinstance(equipment, int):
            equipment_set = Equipment.objects.filter(id=equipment)
        else:
            raise AttributeError(
                'Invalid equipment {0}. It must be Equipment object or int'.
                format(equipment))

        # Next define start and end values
        if start is None:
            macs = [x.xbee_mac for x in equipment_set]
            # DONE - change to another object
            first_rd = RawData.objects.filter(
                mac_address__in=macs).order_by('date').first()
            first_gd = GraphicsData.objects.filter(
                equipment__in=equipment_set).order_by('date').first()
            period_start = first_gd.date if first_gd is not None else first_rd.date
        elif not isinstance(start, datetime):
            period_start = dateparse.parse_date(
                start) or dateparse.parse_datetime(start)
            if period_start is None:
                raise AttributeError(
                    'Wrong date or datetime format {0}. Use standard one'.
                    format(start))
        else:
            period_start = start
        if end is None:
            period_end = timezone.now()
        elif not isinstance(end, datetime):
            period_end = dateparse.parse_date(end) or dateparse.parse_datetime(
                end)
            if period_end is None:
                raise AttributeError(
                    'Wrong date or datetime format {0}. Use standard one'.
                    format(end))
        else:
            period_end = end

        print(period_start, period_end)

        # Main cycle
        detected_intervals = []
        for equip in equipment_set:
            # DONE - will not work with RawData!
            rd_start = RawData.objects.filter(
                mac_address=equip.xbee_mac,
                date__gte=period_start,
                date__lte=period_end).order_by('date').first()
            if rd_start:  # RawData are present
                rd_end = RawData.objects.filter(
                    mac_address=equip.xbee_mac,
                    date__gte=period_start,
                    date__lte=period_end).order_by('date').last()

                qs = RawData.objects.filter(
                    mac_address=equip.xbee_mac,
                    channel=equip.main_channel,
                    date__gte=rd_start.date,
                    date__lte=rd_end.date).order_by('date')
                ts = QuerySetStats(qs,
                                   date_field='date',
                                   aggregate=Avg('value')).time_series(
                                       start=rd_start.date,
                                       end=period_end,
                                       interval='minutes')
                # Any possible RawData should transform to GraphicsData
                with transaction.atomic():
                    # Remove possible doubles
                    GraphicsData.objects.filter(equipment=equip,
                                                date__gte=ts[0][0],
                                                date__lt=ts[-1][0]).delete()
                    # Create new data from ts
                    GraphicsData.objects.bulk_create([
                        GraphicsData(equipment=equip, date=t[0], value=t[1])
                        for t in ts
                    ])
                    # Remove data from RawData (including not used channels)
                    RawData.objects.filter(mac_address=equip.xbee_mac,
                                           date__gte=rd_start.date,
                                           date__lte=rd_end.date).delete()

            # Rebuild time series
            qs = GraphicsData.objects.filter(
                equipment=equip, date__gte=period_start,
                date__lt=period_end).order_by('date')
            ts = [[d.date, d.value] for d in qs]
            cur_start = ts[0][0]
            cur_is_work = ts[0][1] >= equip.no_load_threshold
            intervals = []
            for i, t in enumerate(ts[1:]):
                is_work = (t[1] >= equip.no_load_threshold)
                if is_work != cur_is_work or t[0] == ts[-1][0]:
                    if t[0] == ts[-1][0]:  # last element
                        interval = {
                            'start': cur_start,
                            'end': t[0],
                            'is_work': cur_is_work
                        }
                        if not cur_is_work:
                            minutes = int(
                                (t[0] - cur_start).total_seconds() // 60)
                            if np.mean([x[1] for x in ts[i - minutes:i]]) < 5:
                                interval['is_zero'] = 0
                        intervals.append(interval)
                    elif is_work:  # change from 0 to 1
                        delta = t[0] - cur_start
                        minutes = int(delta.total_seconds() // 60)
                        if minutes >= equip.allowed_idle_interval or not intervals or t[
                                0] == ts[-1][0]:
                            interval = {
                                'start': cur_start,
                                'end': t[0],
                                'is_work': False,
                            }
                            if minutes <= i and i > 0:
                                # If interval has no data (mean value is almost 0)
                                if np.mean([x[1]
                                            for x in ts[i - minutes:i]]) < 5:
                                    interval['is_zero'] = True
                                # If interval with user data exists, save reason and user
                                interval_qs = ClassifiedInterval.objects.filter(
                                    equipment=equip,
                                    start__range=(cur_start -
                                                  timedelta(minutes=3),
                                                  cur_start +
                                                  timedelta(minutes=3)),
                                    end__range=(t[0] - timedelta(minutes=3),
                                                t[0] + timedelta(minutes=3)),
                                    automated_classification__is_working=False,
                                    user_classification__isnull=False)
                                if interval_qs:
                                    interval_db = interval_qs.first()
                                    interval[
                                        'user_classification'] = interval_db.user_classification
                                    interval['user'] = interval_db.user
                            intervals.append(interval)
                            cur_start = t[0]
                            cur_is_work = True
                        else:  # remove current state, pretend to be working
                            cur_start = intervals[-1]['start']
                            cur_is_work = True
                            del intervals[-1]
                    else:
                        interval = {
                            'start': cur_start,
                            'end': t[0],
                            'is_work': True
                        }
                        intervals.append(interval)
                        cur_start = t[0]
                        cur_is_work = False
            # debugging information - output
            # print(intervals)
            # durs = [(x['end'] - x['start']).total_seconds()//60 for x in intervals if not x['is_work']]
            # print(durs)
            # end debugging information, creating intervals
            left_interval = ClassifiedInterval.objects.filter(
                equipment=equip, start__lt=period_start,
                end__gte=period_start).first()
            right_interval = ClassifiedInterval.objects.filter(
                equipment=equip, start__lte=period_end,
                end__gt=period_end).first()
            int_start = left_interval.end if left_interval else period_start
            int_end = right_interval.start if right_interval else period_end
            with transaction.atomic():
                # Remove intervals between int_start and int_end
                ClassifiedInterval.objects.filter(equipment=equip,
                                                  start__gte=int_start,
                                                  end__lte=int_end).delete()
                # very special case - left and right intervals are the same
                if left_interval and right_interval and left_interval.pk == right_interval.pk:
                    if len(intervals) == 1 and intervals[0][
                            'is_work'] == left_interval.is_work:
                        continue  # Nothing to do
                    else:  # Need to create new right interval
                        end_interval = left_interval.end
                        left_interval.end = intervals[0]['start']
                        left_interval.save()
                        right_interval = ClassifiedInterval(
                            equipment=equip,
                            automated_classification=left_interval.
                            automated_classification,
                            user_classification=left_interval.
                            user_classification,
                            is_zero=left_interval.is_zero,
                            start=intervals[-1]["end"],
                            end=end_interval,
                            user=left_interval.user)
                        right_interval.save()
                # try to join left interval
                if left_interval:
                    if intervals[0][
                            'is_work'] == left_interval.automated_classification.is_working:
                        left_interval.end = intervals[0]['end']
                        left_interval.save()
                        del intervals[0]
                    else:
                        left_interval.end = intervals[0]['start']
                        left_interval.save()
                # try to join right interval
                if right_interval:
                    if intervals[-1][
                            'is_work'] == right_interval.automated_classification.is_working:
                        right_interval.start = intervals[-1]['start']
                        right_interval.save()
                        del intervals[-1]
                    else:
                        right_interval.start = intervals[-1]['end']
                        right_interval.save()
                # prepare automated classification
                available_reasons = Reason.objects.filter(
                    code__in=['000', '001', '002']).order_by('code')
                ClassifiedInterval.objects.bulk_create([
                    ClassifiedInterval(
                        equipment=equip,
                        start=ci['start'],
                        end=ci['end'],
                        is_zero=ci.get('is_zero', False),
                        user_classification=ci.get('user_classification'),
                        user=ci.get('user'),
                        automated_classification=(available_reasons[0]
                                                  if ci['is_work'] else
                                                  available_reasons[1]))
                    for ci in intervals
                ])  # End of bulk_create
        # End of main loop

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)
        print(e)
    finally:
        semaphore = Semaphore.objects.filter(
            name=ClassifiedInterval.__name__).first()
        if semaphore:
            semaphore.is_locked = False
            semaphore.save()
        else:
            print('Can not unlock!!!!!')
Example #40
 def get_data(self, interval, days):
     """ Returns an array with new users count per interval """
     stats = QuerySetStats(self.queryset, self.date_field)
     today = datetime.date.today()
     begin = today - datetime.timedelta(days=days - 1)
     return stats.time_series(begin, today, interval)
Example #41
 def test_query_set_missing(self):
     qss = QuerySetStats(date_field='foo')
     for method in ['this_day', 'this_month', 'this_year']:
         self.assertRaises(QuerySetMissing, getattr(qss, method))
Example #42
 def get_data(self, interval, days):
     """ Returns an array with new users count per interval """
     stats = QuerySetStats(self.queryset, self.date_field, aggregate=self.aggregate)
     today = now()
     begin = today - timedelta(days=days - 1)
     return stats.time_series(begin, today + timedelta(days=1), interval)
Example #43
def valuebase(request):
    # build the data (table + filter)
    form = ValuebaseFilterForm(request.POST)
    values = Valuebase.objects.all()
    if form.is_valid():
        if form.cleaned_data['min_value']:
            values = values.filter(value__gte=form.cleaned_data['min_value'])

        if form.cleaned_data['max_value']:
            values = values.filter(value__lte=form.cleaned_data['max_value'])

        if form.cleaned_data['min_date']:
            values = values.filter(date__gte=form.cleaned_data['min_date'])

        if form.cleaned_data['max_date']:
            values = values.filter(date__lte=form.cleaned_data['max_date'])

        if form.cleaned_data['bt']:
            values = values.filter(id_bt__name=form.cleaned_data['bt'])

        if form.cleaned_data['office']:
            values = values.filter(
                id_office__office=form.cleaned_data['office'])

        if form.cleaned_data['city']:
            values = values.filter(id_city__city=form.cleaned_data['city'])
    table = ValuebaseTable(values)
    RequestConfig(request).configure(table)

    # build the metrics table
    res_form = ResForm(request.POST)
    res = Valuebase.objects.all()
    min_date = date(2019, 1, 1)
    max_date = date.today()
    if res_form.is_valid():
        if res_form.cleaned_data['bt_res']:
            res = res.filter(id_bt__name=res_form.cleaned_data['bt_res'])

        if res_form.cleaned_data['city_res']:
            res = res.filter(id_city__city=res_form.cleaned_data['city_res'])

        if res_form.cleaned_data['office_res']:
            res = res.filter(
                id_office__office=res_form.cleaned_data['office_res'])

        if res_form.cleaned_data['min_date_res']:
            res = res.filter(date__gte=res_form.cleaned_data['min_date_res'])
            # check that records exist for the minimum date
            if res_form.cleaned_data['min_date_res'] < values.aggregate(
                    Min('date')).get('date__min'):
                min_date = values.aggregate(Min('date')).get('date__min')
            else:
                min_date = res_form.cleaned_data['min_date_res']

        if res_form.cleaned_data['max_date_res']:
            res = res.filter(date__lte=res_form.cleaned_data['max_date_res'])
            # check that records exist for the maximum date
            if res_form.cleaned_data['max_date_res'] > values.aggregate(
                    Max('date')).get('date__max'):
                max_date = values.aggregate(Max('date')).get('date__max')
            else:
                max_date = res_form.cleaned_data['max_date_res']

        # compute the ratio for the "Active clients" metric
        act_clients = get_stat(res, 1, max_date, min_date)
        # compute the ratio for the "Credit cards" metric
        credit_cards = get_stat(res, 2, max_date, min_date)
        # compute the ratio for the "Costs" metric
        costs = get_stat(res, 3, max_date, min_date)
        # compute the ratio for the "NBI" metric
        nbi = get_stat(res, 4, max_date, min_date)
    else:
        # set default values for the metrics when the form is not filled in
        act_clients = get_stat_def(res, 1)
        credit_cards = get_stat_def(res, 2)
        costs = get_stat_def(res, 3)
        nbi = get_stat_def(res, 4)

    # build the chart

    start_date = min_date
    end_date = max_date
    queryset = Valuebase.objects.values_list('date', 'value')
    qss = QuerySetStats(
        queryset, date_field='date',
        aggregate=Avg('value'))  # compute the average metric value
    graph = qss.time_series(start_date, end_date,
                            interval='months')  # over the selected period

    return render(
        request, 'valuebase.html', {
            'valuebase': table,
            'form': form,
            'res_form': res_form,
            'act_clients': round(act_clients * 100),
            'credit_cards': round(credit_cards * 100),
            'costs': round(costs * 100),
            'nbi': round(nbi * 100),
            'graph': graph,
            'queryset': qss.__dict__
        })
Example #44
def visitclick_data_json(request, *args, **kwargs):
	"""
		For default this function return a data list of week
	"""

	google_format = {'rows':[],'cols':[
			{'label':_('Time'),'type':'string','pattern':''},
			{'label':_('Click'),'type':'number','pattern':''},
	]}
	
	if datetime.now().hour == 0:
		date_init = datetime.now()-timedelta(hours=4)
	else:
		date_init = datetime.now().date()
	date_end = datetime.now()
	strftime='%H:%M'
	field='date'
	display='hours'

	if 'week' in request.GET.values() or 'week' in request.POST.values():
		date_end = get_first_dow(datetime.now().year, datetime.now().isocalendar()[1])
		date_init = date_end-timedelta(days=6)
		strftime='%d/%m'
		display='days'

	elif 'month' in request.GET.values() or 'month' in request.POST.values():
		date_init = date(date_end.year,date_end.month,1)
		strftime='%d/%m'
		display='days'

	elif 'year' in request.GET.values() or 'year' in request.POST.values():
		date_init = datetime.now() - timedelta(days=365)
		strftime='%m/%Y'
		display='months'

	elif 'custom' in request.GET.values() or 'custom' in request.POST.values():
		date_init = datetime.strptime(request.POST.get('date_init',False) or request.GET.get('date_init'),'%Y-%m-%d')
		date_end = datetime.strptime(request.POST.get('date_end',False) or request.GET.get('date_end'),'%Y-%m-%d')
		date_diff = date_end-date_init
		if (date_diff.days ==1):
			strftime='%d/%m'
			display='hours'
		elif (date_diff.days >2 and date_diff.days <=31):
			strftime='%d/%m'
			display='days'
		elif (date_diff.days > 31):
			strftime='%d/%m'
			display='months'

	elif 'clicks' in request.GET.values() or 'clicks' in request.POST.values():
		date_init = datetime.strptime(request.POST.get('date_init',False) or request.GET.get('date_init'),'%Y-%m-%d %H:%M:%S')
		date_end = request.POST.get('date_end',False) or request.GET.get('date_end',False)
		field='url'
		google_format['cols']=[
			{'label':_('Url'),'type':'string','pattern':''},
			{'label':_('Click'),'type':'number','pattern':''},
		]
		if date_end:
			date_end = datetime.strptime(date_end,'%Y-%m-%d %H:%M:%S')
		else:
			date_end =date_init+timedelta(hours=1)

	elif 'browsers' in request.GET.values() or 'browsers' in request.POST.values():
		field='browser'
		google_format['cols']=[
			{'label':_('Browsers'),'type':'string','pattern':''},
			{'label':_('Click'),'type':'number','pattern':''},
		]
	elif 'operating_system' in request.GET.values() or 'operate_system' in request.POST.values():
		field='operating_system'
		google_format['cols']=[
			{'label':_('Operating System'),'type':'string','pattern':''},
			{'label':_('Click'),'type':'number','pattern':''},
		]
	
	clicks = Click.objects.filter(
			Q(date__gte=date_init),
			Q(date__lte=date_end)
	)


	if not ('clicks' in request.GET.values() or 'clicks' in request.POST.values()) and \
	   not ('browsers' in request.GET.values() or 'browsers' in request.POST.values()) and \
	   not ('operating_system' in request.GET.values() or 'operating_system' in request.POST.values()):
		clicks = QuerySetStats(clicks, field).time_series(date_init,date_end, display,aggregate=Count(field))
	else:
		clicks= clicks.values(field).order_by(field).annotate(count=Count(field))


	if field == 'date':
		google_format['cols']=[
			{'label':_('Date'),'type':'string','pattern':''},
			{'label':_('Click'),'type':'number','pattern':''},
		]


	for click in clicks:
		if type(click) is tuple:
			value_line=click[0]
			label_line=click[0]
			value_column=str(click[1])
			label_column=click[1]
		if type(click) is dict:
			value_line, value_column = click.values()
			label_line, label_column = click.values()

		if isinstance(value_line,datetime):
			value_line=value_line.strftime(strftime)

		if field == 'browser' or field == 'operating_system':
			google_format['rows'].append({
				'c':[
						{'v':label_column,'f':value_column},
						{'v':label_line,'f':value_line},
					]
			})
		else:
			google_format['rows'].append({
				'c':[
						{'v':label_line,'f':value_line},
						{'v':label_column,'f':value_column},
					]
			})			
	
	return HttpResponse(
						simplejson.dumps(google_format,cls=DjangoJSONEncoder),
						mimetype='application/json'
	)
Example #46
 def data_for_period(self, qs, start, end, interval='days'):
     """Return a time series of data for the given period."""
     qss = QuerySetStats(qs, self.datetime_field,
                         aggregate=self.aggregate)
     return qss.time_series(start, end, interval=interval)
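A hedged usage sketch for the helper above, assuming it lives on a report-style object that defines `datetime_field` and `aggregate` (the host class below is purely illustrative):

# Illustrative host class; only the two attributes read by data_for_period are defined.
import datetime
from django.contrib.auth.models import User
from django.db.models import Count
from qsstats import QuerySetStats


class SignupReport(object):
    datetime_field = 'date_joined'
    aggregate = Count('id')

    def data_for_period(self, qs, start, end, interval='days'):
        """Return a time series of data for the given period."""
        qss = QuerySetStats(qs, self.datetime_field,
                            aggregate=self.aggregate)
        return qss.time_series(start, end, interval=interval)


today = datetime.date.today()
series = SignupReport().data_for_period(
    User.objects.all(), today - datetime.timedelta(days=30), today)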