Beispiel #1
0
def this_year():
    """Return the largest ``proj_years`` value across all projects."""
    stats = Project.objects.aggregate(Max('proj_years'))
    return stats['proj_years__max']
Beispiel #2
0
 # Example snippets: Django many-to-many management and aggregation.
 # author_obj.book.add(11, 12)
 # books = Book.objects.filter(id__gt=10)
 # author_obj.book.add(*books)
 # Give author_obj a new set of books (all with id > 10); set() replaces
 # the previously related rows.
 # books = Book.objects.filter(id__gt=10)
 # author_obj.book.set(books)
 # Remove one specific book from author_obj.
 # book_del = Book.objects.get(id=11)
 # author_obj.book.remove(book_del)
 # Clear every book related to author_obj; check whether the relevant
 # database column is allowed to be empty first.
 # author_obj.book.clear()
 # Aggregation functions.
 from django.db.models import Avg, Sum, Max, Min, Count
 price_avg = Book.objects.all().aggregate(Avg("price"))
 price_sum = Book.objects.all().aggregate(Sum("price"))
 price_max = Book.objects.all().aggregate(Max("price"))
 # price_min: choose the key under which the value appears in the result dict.
 price_min = Book.objects.all().aggregate(price_min=Min("price"))
 price_all = Book.objects.all().aggregate(price_min=Min("price"),
                                          price_max=Max("price"),
                                          price_avg=Avg("price"))
 # Grouping: annotate each book with the number of related authors;
 # Count() is given the related field name directly.
 books_obj = Book.objects.all().annotate(author_num=Count("author"))
 for item in books_obj:
     print(item.name, item.author_num)
 # Keep only books that have more than one author.
 books_obj = Book.objects.all().annotate(author_num=Count("author")).filter(
     author_num__gt=1)
 # F and Q queries.
 from django.db.models import F, Q
 # F query: select rows whose modified time is later than their created time.
Beispiel #3
0
 def __init__(self, provider, report_type):
     """Build the Azure provider report mapping, then initialize the base.

     The mapping configures, for the Azure provider: which query-string
     filters translate to ORM filters, the allowed group-by fields, and
     the aggregates/annotations used by each report type ('costs',
     'instance_type', 'storage', 'tags').
     """
     # List of provider mappings; only Azure is configured here.
     self._mapping = [
         {
             'provider':
             Provider.PROVIDER_AZURE,
             'alias':
             'subscription_guid',  # FIXME: probably wrong
             'annotations': {},
             'end_date':
             'costentrybill__billing_period_end',
             # Supported query filters (query param -> field/operation).
             'filters': {
                 'subscription_guid': [
                     {
                         'field': 'subscription_guid',
                         'operation': 'icontains',
                         'composition_key': 'account_filter'
                     },
                 ],
                 'service_name': {
                     'field': 'service_name',
                     'operation': 'icontains'
                 },
                 'resource_location': {
                     'field': 'resource_location',
                     'operation': 'icontains'
                 },
                 'instance_type': {
                     'field': 'instance_type',
                     'operation': 'icontains'
                 }
             },
             'group_by_options': [
                 'service_name', 'subscription_guid', 'resource_location',
                 'instance_type'
             ],
             'tag_column':
             'tags',
             # Per-report-type aggregation/annotation configuration.
             'report_type': {
                 # Cost report: cost = pretax + markup (NULLs coalesced to 0).
                 'costs': {
                     'aggregates': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Sum(Value(0, output_field=DecimalField())),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                     },
                     'aggregate_key':
                     'pretax_cost',
                     'annotations': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Value(0, output_field=DecimalField()),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'cost_units':
                         Coalesce(Max('currency'), Value('USD'))
                     },
                     'delta_key': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         )
                     },
                     'filter': [{}],
                     'cost_units_key':
                     'currency',
                     'cost_units_fallback':
                     'USD',
                     'sum_columns': [
                         'cost', 'infrastructure_cost', 'derived_cost',
                         'markup_cost'
                     ],
                     'default_ordering': {
                         'cost': 'desc'
                     },
                 },
                 # Instance-type report: usage plus instance counts.
                 'instance_type': {
                     'aggregates': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Sum(Value(0, output_field=DecimalField())),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'count':
                         Sum(Value(0, output_field=DecimalField())),
                         'usage':
                         Sum('usage_quantity'),
                     },
                     'aggregate_key':
                     'usage_quantity',
                     'annotations': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Value(0, output_field=DecimalField()),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'cost_units':
                         Coalesce(Max('currency'), Value('USD')),
                         'count':
                         Max('instance_count'),
                         'count_units':
                         Value('instance_types', output_field=CharField()),
                         'usage':
                         Sum('usage_quantity'),
                         # FIXME: Waiting on MSFT for usage_units default
                         'usage_units':
                         Coalesce(Max('unit_of_measure'),
                                  Value('Instance Type Placeholder'))
                     },
                     'delta_key': {
                         'usage': Sum('usage_quantity')
                     },
                     'filter': [{
                         'field': 'instance_type',
                         'operation': 'isnull',
                         'parameter': False
                     }],
                     'group_by': ['instance_type'],
                     'cost_units_key':
                     'currency',
                     'cost_units_fallback':
                     'USD',
                     'usage_units_key':
                     'unit_of_measure',
                     'usage_units_fallback':
                     'Instance Type Placeholder',  # FIXME: Waiting on MSFT
                     'count_units_fallback':
                     'instances',
                     'sum_columns': [
                         'usage', 'cost', 'infrastructure_cost',
                         'derived_cost', 'markup_cost', 'count'
                     ],
                     'default_ordering': {
                         'usage': 'desc'
                     },
                 },
                 # Storage report: rows whose service_name contains 'Storage'.
                 'storage': {
                     'aggregates': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'usage':
                         Sum('usage_quantity'),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'derived_cost':
                         Sum(Value(0, output_field=DecimalField())),
                     },
                     'aggregate_key':
                     'usage_quantity',
                     'annotations': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Value(0, output_field=DecimalField()),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'cost_units':
                         Coalesce(Max('currency'), Value('USD')),
                         'usage':
                         Sum('usage_quantity'),
                         # FIXME: Waiting on MSFT for usage_units default
                         'usage_units':
                         Coalesce(Max('unit_of_measure'),
                                  Value('Storage Type Placeholder'))
                     },
                     'delta_key': {
                         'usage': Sum('usage_quantity')
                     },
                     'filter': [{
                         'field': 'service_name',
                         'operation': 'contains',
                         'parameter': 'Storage'
                     }],
                     'cost_units_key':
                     'currency',
                     'cost_units_fallback':
                     'USD',
                     'usage_units_key':
                     'unit_of_measure',
                     'usage_units_fallback':
                     'Storage Type Placeholder',  # FIXME
                     'sum_columns': [
                         'usage', 'cost', 'infrastructure_cost',
                         'derived_cost', 'markup_cost'
                     ],
                     'default_ordering': {
                         'usage': 'desc'
                     },
                 },
                 # Tag report: default ordering only.
                 'tags': {
                     'default_ordering': {
                         'cost': 'desc'
                     },
                 },
             },
             'start_date':
             'costentrybill__billing_period_start',
             # Model backing the report queries.
             'tables': {
                 'query': AzureCostEntryLineItemDailySummary,
             },
         },
     ]
     super().__init__(provider, report_type)
Beispiel #4
0
def query_pvi_info_h5(pvi_name, pvi_info=PVIQueryInfo.Energy_Today):
    '''
    Query one metric from an H5 PV inverter's logged register data.

    All PV-inverter types should implement this function for every
    PVIQueryInfo member.

    pvi_name -- inverter name used to filter RegData rows
    pvi_info -- which metric to query (default PVIQueryInfo.Energy_Today)

    Returns a number for the energy/AC metrics, a list for the *_List
    metrics; on empty results returns 0 (energy branches) or None
    (AC_* branches and the unhandled cases).
    '''
    logger.debug('query_pvi_info({pvi_name},{pvi_info})'.format(
        pvi_name=pvi_name, pvi_info=pvi_info))
    # Time window for the instantaneous AC_* queries: the last 30 minutes.
    time_since = (datetime.now() + timedelta(minutes=-30)).time()
    #time_since = datetime.combine(datetime.now().date(),time.min)
    time_until = datetime.now().time()
    logger.debug('query time range %s and %s' %
                 (str(time_since), str(time_until)))

    if pvi_info == PVIQueryInfo.Energy_Today:
        # Daily maximum of the 'Today Wh' register for this inverter,
        # oldest day first.
        queryset = RegData.objects.filter(
            address=h5.INPUT_REGISTER['Today Wh'][
                h5.REGISTER_ADDRESS_COL]).filter(
                    pvi_name=pvi_name).values('prob_date').annotate(
                        Max('value')).order_by('prob_date')
        total = len(queryset)
        if (total > 0):
            t_date = queryset[total - 1]['prob_date']
            if t_date == datetime.now().date():
                value = queryset[total - 1].get('value__max')
                # register value appears to be in units of 10 Wh -- TODO confirm
                logger.debug('return %d' % (value * 10))
                return (value * 10)
            # NOTE(review): when the newest sample is not from today this
            # falls through and the function returns None, not 0 --
            # confirm that is intended.
        else:
            logger.error('empty query result returned')
            return 0
    elif pvi_info == PVIQueryInfo.Energy_This_Month:
        # Sum of per-day maxima since the end of last month.
        last_month_end_date = date(datetime.now().year,
                                   datetime.now().month,
                                   1) + timedelta(days=-1)
        queryset = RegData.objects.filter(
            address=h5.INPUT_REGISTER['Today Wh'][h5.REGISTER_ADDRESS_COL]
        ).filter(pvi_name=pvi_name).filter(
            prob_date__gt=last_month_end_date).values('prob_date').annotate(
                Max('value')).order_by('prob_date')
        value = 0
        if len(queryset) > 0:
            for entry in queryset:
                value += entry.get('value__max')
            logger.debug('return %d' % (value * 10))
            return (value * 10)
        else:
            logger.error('empty query result returned')
            return 0
    elif pvi_info == PVIQueryInfo.Energy_Until_Now:
        # Lifetime energy: most recent 'DC Life Wh' register sample.
        queryset = RegData.objects.filter(
            address=h5.INPUT_REGISTER['DC Life Wh'][h5.REGISTER_ADDRESS_COL]
        ).filter(pvi_name=pvi_name).order_by('-date')
        if len(queryset) > 0:
            value = queryset[0].value * 10
            logger.debug('return %d' % (value))
            return (value)
        else:
            logger.error('empty query result returned')
            return 0
    elif pvi_info == PVIQueryInfo.Energy_Hourly_List:
        return pvi_query_info_energy_hourly_list()

    elif pvi_info == PVIQueryInfo.Energy_Daily_List:
        return pvi_query_info_energy_daily_list()

    elif pvi_info == PVIQueryInfo.AC_Output_Voltage:
        # Newest voltage sample from today within the 30-minute window;
        # raw value scaled by 0.1 (presumably to volts -- verify).
        queryset = RegData.objects.filter(
            address=h5.INPUT_REGISTER['Voltage'][h5.REGISTER_ADDRESS_COL]
        ).filter(pvi_name=pvi_name).filter(
            prob_date__exact=datetime.now().date()).filter(
                prob_time__range=[time_since, time_until]).order_by('-date')
        if len(queryset) > 0:
            value = round(queryset[0].value * 0.1, 1)
            logger.debug('return %d' % (value))
            return (value)
        else:
            logger.error('empty query result returned')
    elif pvi_info == PVIQueryInfo.AC_Output_Current:
        # Newest current sample; raw value scaled by 0.01 (presumably amps).
        queryset = RegData.objects.filter(
            address=h5.INPUT_REGISTER['Current'][h5.REGISTER_ADDRESS_COL]
        ).filter(pvi_name=pvi_name).filter(
            prob_date__exact=datetime.now().date()).filter(
                prob_time__range=[time_since, time_until]).order_by('-date')
        if len(queryset) > 0:
            value = round(queryset[0].value * 0.01, 2)
            logger.debug('return %d' % (value))
            return (value)
        else:
            logger.error('empty query result returned')
    elif pvi_info == PVIQueryInfo.AC_Output_Wattage:
        # Newest wattage sample (raw register value, no scaling).
        queryset = RegData.objects.filter(
            address=h5.INPUT_REGISTER['Wattage'][h5.REGISTER_ADDRESS_COL]
        ).filter(pvi_name=pvi_name).filter(
            prob_date__exact=datetime.now().date()).filter(
                prob_time__range=[time_since, time_until]).order_by('-date')
        if len(queryset) > 0:
            value = queryset[0].value
            logger.debug('return %d' % (value))
            return (value)
        else:
            logger.error('empty query result returned')
    else:
        # NOTE(review): 'unknow' is a typo in this runtime log string
        # (left unchanged here).
        logger.error('unknow query pvi_info %s' % pvi_info)
Beispiel #5
0
def pvi_query_info_energy_hourly_list():
    '''
    Provide function for query_pvi_info on PVIQueryInfo.Energy_Hourly_List.

    Builds a list of [hour-datetime, energy] pairs covering roughly the
    last MAX_QUERY_ENERGY_HOURLY_LIST_LEN hours: per-hour maxima of the
    cumulative 'Today Wh' register are differenced within each day, and
    missing hours are back-filled with zero entries.
    '''
    # Look back far enough (in whole dates) to cover the hourly window.
    date_since = (
        datetime.now() +
        timedelta(hours=-pvi.MAX_QUERY_ENERGY_HOURLY_LIST_LEN)).date()
    # Per (date, hour) maximum of the cumulative register, newest first.
    queryset = RegData.objects.filter(
        address=h5.INPUT_REGISTER['Today Wh'][h5.REGISTER_ADDRESS_COL]).filter(
            prob_date__gt=date_since).values(
                'prob_date', 'prob_hour').annotate(Max('value')).order_by(
                    '-prob_date', '-prob_hour')
    logger.debug('sql cmd: %s' % str(queryset.query))
    info = []
    logger.debug('queryset count %d' % queryset.count())
    max_report_len = pvi.MAX_QUERY_ENERGY_HOURLY_LIST_LEN + 1  # last 48 hours
    if queryset.count() < max_report_len:
        max_report_len = queryset.count()
    for entry in queryset[:max_report_len]:
        #logger.debug(entry['prob_date'])
        #logger.debug(entry['prob_hour'])
        t_hour = entry['prob_hour']
        t_time = time(t_hour, 0, 0)
        #logger.debug(str(t_time))
        info.append([
            datetime.combine(entry['prob_date'], t_time), entry['value__max']
        ])
    logger.debug('query return:\n%s' % str(info))
    info.sort(key=lambda x: x[0])

    if len(info) > 0:
        # Register value appears to be in units of 10 Wh -- TODO confirm.
        info = [[entry[0], entry[1] * 10] for entry in info]
    else:
        logger.warning('no energy sample data in database')
        # No samples at all: emit one zero entry per hour in the window.
        this_hour_time = datetime.combine(datetime.now().date(),
                                          time(datetime.now().hour, 0, 0))
        for i in range(pvi.MAX_QUERY_ENERGY_HOURLY_LIST_LEN):
            info.append([this_hour_time, 0])
            this_hour_time -= timedelta(hours=1)

    info.reverse()
    dataset = info
    # Difference consecutive cumulative readings within the same day to get
    # per-hour energy (list is newest-first at this point).
    # NOTE(review): range(len(dataset) - 2) skips the final adjacent pair;
    # range(len(dataset) - 1) would cover it -- confirm whether intentional.
    info = [[dataset[i][0], dataset[i][1] - dataset[i + 1][1]]
            for i in range(len(dataset) - 2)
            if dataset[i][0].date() == dataset[i + 1][0].date()]
    info.reverse()

    #-> insert zero energy value for missing hour
    dataset = []
    if len(info) > 0:
        dataset.append(info[0])
        t_date = info[0][0]
        i = 1
        while i < len(info):
            t_date = t_date + timedelta(hours=+1)
            if t_date < info[i][0]:
                dataset.append([t_date, 0])
            else:
                dataset.append(info[i])
                i += 1
        dataset.sort(key=lambda x: x[0])

    return dataset
def global_var(request):
    """Build the context dictionary shared by the commerce templates.

    Collects per-user counters (wishlist/compare), supplier and seller
    flags, category/tag listings, currently featured sales and the cart
    summary for the given request's user.
    """
    user = request.user

    # ---------------- number items in wishlist (0 for anonymous users)
    if user.is_authenticated:
        number_items_wish_list = WishList.objects.filter(
            user__user=user).count()
    else:
        number_items_wish_list = 0

    # ---------------- is supplier (plus the owned shop's id, if any)
    supplier = False
    store = None
    if user.is_authenticated:
        profil = user.profil
        if profil.is_professional and profil.is_supplier:
            if Shop.objects.filter(owner=profil).exists():
                store = get_object_or_404(Shop, owner=profil).id
                supplier = True

    # ---------------- is seller
    seller = False
    if user.is_authenticated:
        profil = user.profil
        if profil.is_seller:
            seller = True

    # ---------------- number items in compare (0 for anonymous users)
    if user.is_authenticated:
        number_items_compare = Compare.objects.filter(user__user=user).count()
    else:
        number_items_compare = 0

    # --------------- All Categories ---------------
    categories = CommerceCategory.objects.all()

    # --------------- Hot Categories ---------------
    # Categories carrying the currently highest sale percentages.
    hot_categories = list()
    featured_sale = Sale.objects.filter(date_end__gte=datetime.date.today()).values(
        'product__cat__category_two__category_one__category') \
                        .annotate(Max('percentage')).order_by('-percentage__max')[:6]
    for el in featured_sale:
        cat = CommerceCategory.objects.get(
            pk=el["product__cat__category_two__category_one__category"])
        hot_categories.append(cat)
        el["product__cat__category_two__category_one__category"] = cat
    # Pad to at least 4 categories. Iterating the queryset once replaces
    # the original per-index `.all()[i]` pattern, which issued one DB
    # query per position.
    if len(hot_categories) < 4:
        for category_to_add in CommerceCategory.objects.all():
            if category_to_add not in hot_categories:
                hot_categories.append(category_to_add)
            if len(hot_categories) == 4:
                break

    # --------------- All Tags ---------------
    tags = Tag.objects.all()

    # --------------- Cart ---------------
    my_cart_result = my_cart(user)
    number_products_in_cart = my_cart_result['number_products_in_cart']
    total_price_in_cart = my_cart_result['total_price_in_cart']
    cart_result = my_cart_result['cart']

    context = {
        # for base.html
        'categories': categories,
        'number_items_wish_list': number_items_wish_list,
        'number_items_compare': number_items_compare,
        'hot_categories': hot_categories[:4],
        'featured_sale': featured_sale,
        'tags': tags,
        'cart': cart_result,
        'total_price_in_cart': total_price_in_cart,
        'number_products_in_cart': number_products_in_cart,
        'is_supplier': supplier,
        'is_seller': seller,
        'store_id': store
    }

    return context
Beispiel #7
0
def exam_records_ordered(exam):
	"""Custom template tag: return the exam's records ordered by the
	highest student degree, descending."""
	ordered = (ExamRecord.objects
		.filter(exam=exam)
		.annotate(top_stud=Max('student_degree'))
		.order_by('-top_stud'))
	return ordered
Beispiel #8
0
def signoff_details(request, locale_code, app_code):
    """Details pane loaded on sign-off on a particular revision.

    Requires 'rev' in the query string, supports explicitly passing a 'run'.

    Renders shipping/signoff-details.html with:
      run     -- the explicitly requested Run, or None
      good    -- True when the latest run is clean and not superseded
      doubled -- True when a sign-off already exists for the same push
      newer   -- sorted flag names of newer sign-offs
    """
    try:
        # rev query arg is required, it's not a url param for caching, and because it's dynamic
        # in the js code, so the {% url %} tag prefers this
        rev = request.GET['rev']
    except KeyError:
        # narrowed from a bare `except:` -- a missing query arg is the only
        # expected failure here
        raise Http404
    try:
        # there might be a specified run parameter
        runid = int(request.GET['run'])
    except (KeyError, ValueError, TypeError):
        # narrowed from a bare `except:` -- absent or non-numeric 'run'
        runid = None
    appver = get_object_or_404(AppVersion, code=app_code)
    lang = get_object_or_404(Locale, code=locale_code)
    forest = appver.tree.l10n
    repo = get_object_or_404(Repository, locale=lang, forest=forest)

    run = lastrun = None
    good = False
    # Initialized up front: the original defined these only inside the
    # `cs is not None` branch, raising NameError in the render call when
    # the revision was unknown.
    doubled = None
    newer = []
    try:
        cs = repo.changesets.get(revision__startswith=rev)
    except Changeset.DoesNotExist:
        cs = None
    if cs is not None:
        runs = Run.objects.order_by('-pk').filter(tree=appver.tree_id,
                                                  locale=lang,
                                                  revisions=cs)
        if runid is not None:
            try:
                run = runs.get(id=runid)
            except Run.DoesNotExist:
                pass
        try:
            lastrun = runs[0]
        except IndexError:
            pass
        good = lastrun and (lastrun.errors == 0) and (lastrun.allmissing == 0)

        # check if we have a newer signoff.
        push = cs.pushes.get(repository=repo)
        sos = appver.signoffs.filter(locale=lang, push__gte=push)
        sos = list(sos.annotate(la=Max('action')))
        if sos:
            s2a = dict((so.id, so.la) for so in sos)
            actions = Action.objects.filter(id__in=s2a.values())
            actions = dict(
                (a.signoff_id, a.get_flag_display()) for a in actions)
            for so in sos:
                if so.push_id == push.id:
                    doubled = True
                    good = False
                else:
                    flag = actions[so.id]
                    if flag not in newer:
                        newer.append(flag)
                        good = False
            newer = sorted(newer)

    return render_to_response('shipping/signoff-details.html', {
        'run': run,
        'good': good,
        'doubled': doubled,
        'newer': newer,
    })
Beispiel #9
0
 def max_bid(self):
     """Aggregate this object's bids: the highest bid value as
     ``max_bid`` and the maximum ``placer`` value as ``buyer``."""
     bids = self.bid_set.all()
     return bids.aggregate(max_bid=Max('value'), buyer=Max('placer'))
    def test_populate_line_item_daily_summary_table(self, mock_vacuum):
        """Test that the line item daily summary table populates.

        Seeds 25 OCP usage line items, runs the daily and daily-summary
        population steps over the seeded interval, then checks that the
        summary row count changed, the summary date range matches the
        report range, and every expected summary column is non-null.

        mock_vacuum -- presumably injected by a @patch decorator not
        visible in this chunk; unused here.
        """
        # Start from a clean slate before seeding.
        self.tearDown()
        report_table_name = OCP_REPORT_TABLE_MAP['report']
        summary_table_name = OCP_REPORT_TABLE_MAP['line_item_daily_summary']

        report_table = getattr(self.accessor.report_schema, report_table_name)
        summary_table = getattr(self.accessor.report_schema,
                                summary_table_name)

        # Seed usage line items for the current reporting period/report.
        for _ in range(25):
            self.creator.create_ocp_usage_line_item(self.reporting_period,
                                                    self.report)

        # Seeded interval boundaries; truncated to midnight below.
        report_entry = report_table.objects.all().aggregate(
            Min('interval_start'), Max('interval_start'))
        start_date = report_entry['interval_start__min']
        end_date = report_entry['interval_start__max']

        start_date = start_date.replace(hour=0,
                                        minute=0,
                                        second=0,
                                        microsecond=0)
        end_date = end_date.replace(hour=0, minute=0, second=0, microsecond=0)

        query = self.accessor._get_db_obj_query(summary_table_name)
        initial_count = query.count()

        # Populate the daily table, then the daily-summary table.
        self.accessor.populate_line_item_daily_table(start_date, end_date,
                                                     self.cluster_id)
        self.accessor.populate_line_item_daily_summary_table(
            start_date, end_date, self.cluster_id)

        self.assertNotEqual(query.count(), initial_count)

        # Summary usage range must match the seeded report range.
        summary_entry = summary_table.objects.all().aggregate(
            Min('usage_start'), Max('usage_start'))
        result_start_date = summary_entry['usage_start__min']
        result_end_date = summary_entry['usage_start__max']

        self.assertEqual(result_start_date, start_date)
        self.assertEqual(result_end_date, end_date)

        entry = query.first()

        # Columns expected to be populated on every summary row.
        summary_columns = [
            'cluster_id',
            'namespace',
            'node',
            'node_capacity_cpu_core_hours',
            'node_capacity_cpu_cores',
            'node_capacity_memory_gigabyte_hours',
            'node_capacity_memory_gigabytes',
            'pod',
            'pod_labels',
            'pod_limit_cpu_core_hours',
            'pod_limit_memory_gigabyte_hours',
            'pod_request_cpu_core_hours',
            'pod_request_memory_gigabyte_hours',
            'pod_usage_cpu_core_hours',
            'pod_usage_memory_gigabyte_hours',
            'usage_end',
            'usage_start',
        ]

        for column in summary_columns:
            self.assertIsNotNone(getattr(entry, column))
Beispiel #11
0
 def get_week_top10_users(self):
     """Return the 10 most active users of this week's sessions.

     Each entry carries ``total`` (session count per user) and ``last``
     (most recent ``date_start``), ordered by descending ``total``.
     """
     ranked = (self.session_week
               .values("user")
               .annotate(total=Count("user"))
               .annotate(last=Max("date_start"))
               .order_by("-total"))
     return ranked[:10]
Beispiel #12
0
 def get_week_top10_assets(self):
     """Return the 10 most used assets of this week's sessions.

     Each entry carries ``total`` (session count per asset) and ``last``
     (most recent ``date_start``), ordered by descending ``total``.
     """
     ranked = (self.session_week
               .values("asset")
               .annotate(total=Count("asset"))
               .annotate(last=Max("date_start"))
               .order_by("-total"))
     return ranked[:10]