Ejemplo n.º 1
0
def search_geo_times(date_or_partial_date, permit_requests_qs, limit=None):
    """Find geo-time rows whose start or end date matches the given
    (possibly partial) date, restricted to ``permit_requests_qs``.

    Returns a list of ``SearchResult`` objects, most recently created
    permit requests first, optionally truncated to ``limit`` entries.
    """
    # Human-readable labels for the date field that produced the match.
    field_labels = {
        "starts_at": _("Date de début"),
        "ends_at": _("Date de fin"),
    }

    starts_filter = date_to_filters("starts_at", date_or_partial_date)
    ends_filter = date_to_filters("ends_at", date_or_partial_date)

    rows = models.PermitRequestGeoTime.objects.filter(
        Q(**starts_filter) | Q(**ends_filter),
        permit_request__in=permit_requests_qs,
    )
    # Record which of the two dates matched, and its value, so the result
    # can show the matching field to the user.
    rows = rows.annotate(
        matching_date=Case(
            When(**starts_filter, then=F("starts_at__date")),
            default=F("ends_at__date"),
            output_field=DateField(),
        ),
        matching_date_field=Case(
            When(**starts_filter, then=Value("starts_at")),
            default=Value("ends_at"),
            output_field=CharField(),
        ),
        author_full_name=Concat(
            "permit_request__author__user__first_name",
            Value(" "),
            "permit_request__author__user__last_name",
        ),
    )
    rows = rows.values(
        "permit_request_id",
        "permit_request__status",
        "permit_request__created_at",
        "matching_date",
        "matching_date_field",
        "author_full_name",
    )
    rows = rows.annotate(
        nb_per_permit_request=Count("permit_request_id")
    ).order_by("-permit_request__created_at")

    if limit is not None:
        rows = rows[:limit]

    results = []
    for row in rows:
        results.append(
            SearchResult(
                permit_request_id=row["permit_request_id"],
                permit_request_status=row["permit_request__status"],
                permit_request_created_at=row["permit_request__created_at"],
                author_name=row["author_full_name"],
                field_label=field_labels.get(row["matching_date_field"], ""),
                field_value=row["matching_date"].strftime("%d.%m.%Y"),
                score=1,
                match_type=MatchType.TIME,
            )
        )
    return results
Ejemplo n.º 2
0
 def get_queryset(self):
     """Extend the parent queryset with an ``asset_display`` column in the
     form ``hostname(ip)``."""
     base_qs = super().get_queryset()
     display_expr = Concat(
         F('asset__hostname'), Value('('), F('asset__ip'), Value(')'))
     return base_qs.annotate(asset_display=display_expr)
Ejemplo n.º 3
0
 def annotate_full_name(self, qs):
     """Annotate ``qs`` with ``full_name``: the author's first and last
     name separated by a single space."""
     full_name_expr = Concat(
         F('author__first_name'),
         Value(' '),
         F('author__last_name'),
         output_field=CharField(),
     )
     return qs.annotate(full_name=full_name_expr)
Ejemplo n.º 4
0
def list_(request):
    """List view for test cases with search, ordering, pagination and
    per-row counts.

    NOTE: this view passes ``locals()`` to the template, so every local
    variable name here is part of the template context — do not rename
    locals without checking the template.
    """
    # One-shot "success" prompt carried over from the previous request.
    if request.session.get('state', None) == 'success':
        prompt = 'success'
    request.session['state'] = None

    project_list = get_project_list()
    has_sub_object = True
    is_admin = check_admin(request.user)

    # Query parameters (size falls back to the cookie set on a prior visit).
    page = request.GET.get('page')
    size = request.GET.get('size', request.COOKIES.get('size'))
    search_text = request.GET.get('search_text', '')
    order_by = request.GET.get('order_by', 'pk')
    order_by_reverse = request.GET.get('order_by_reverse', 'True')
    all_ = request.GET.get('all_', 'False')
    search_project = request.GET.get('search_project', None)

    # Normalize string query params to their proper types.
    page = change_to_positive_integer(page, 1)
    size = change_to_positive_integer(size, 10)
    if order_by_reverse == 'True':
        order_by_reverse = True
    else:
        order_by_reverse = False
    if all_ == 'False':
        all_ = False
    else:
        all_ = True
    if search_project in ('', 'None'):
        search_project = None

    # Build the filter: active cases, optionally restricted to the current
    # user's own cases and/or a single project.
    q = get_query_condition(search_text)
    q &= Q(is_active=True)
    if not all_:
        q &= Q(creator=request.user)
    if search_project:
        q &= Q(project=search_project)

    objects = Case.objects.filter(q).values(
        'pk', 'uuid', 'name', 'keyword', 'project__name', 'creator',
        'creator__username',
        'modified_date').annotate(real_name=Concat('creator__last_name',
                                                   'creator__first_name',
                                                   output_field=CharField()))
    # Count of active steps per case, keyed by case pk.
    m2m_objects = Case.objects.filter(
        is_active=True,
        step__is_active=True).values('pk').annotate(m2m_count=Count('step'))
    m2m_count = {o['pk']: o['m2m_count'] for o in m2m_objects}

    # Fetch how many times each case is referenced (called) elsewhere.
    reference_objects1 = Case.objects.filter(
        is_active=True, suite__is_active=True).values('pk').annotate(
            reference_count=Count('*'))
    reference_count1 = {
        o['pk']: o['reference_count']
        for o in reference_objects1
    }
    reference_objects2 = Step.objects.filter(
        is_active=True, action__code='OTHER_CALL_SUB_CASE').values(
            'other_sub_case_id').annotate(reference_count=Count(
                '*')).order_by()  # order_by() clears Step's default ordering by modified time, which would otherwise leak into the GROUP BY
    reference_count2 = {
        o['other_sub_case_id']: o['reference_count']
        for o in reference_objects2
    }

    for o in objects:
        # Attach the number of child (step) objects.
        o['m2m_count'] = m2m_count.get(o['pk'], 0)
        # Attach the combined reference count.
        o['reference_count'] = reference_count1.get(
            o['pk'], 0) + reference_count2.get(o['pk'], 0)

    # Sorting (done in Python because the computed columns are not in SQL).
    if objects:
        if order_by not in objects[0]:
            order_by = 'pk'
        objects = sorted(objects,
                         key=lambda x: x[order_by],
                         reverse=order_by_reverse)
    # Pagination: fall back to page 1 / the last page on bad page numbers.
    paginator = Paginator(objects, size)
    try:
        objects = paginator.page(page)
    except PageNotAnInteger:
        objects = paginator.page(1)
        page = 1
    except EmptyPage:
        objects = paginator.page(paginator.num_pages)
        page = paginator.num_pages
    order_by_form = OrderByForm(initial={
        'order_by': order_by,
        'order_by_reverse': order_by_reverse
    })
    paginator_form = PaginatorForm(initial={
        'page': page,
        'size': size
    },
                                   page_max_value=paginator.num_pages)
    # Persist the chosen page size in a cookie for subsequent visits.
    cookies = [Cookie('size', size, path=request.path)]
    respond = render(request, 'main/case/list.html', locals())
    for cookie in cookies:
        respond.set_cookie(cookie.key, cookie.value, cookie.max_age,
                           cookie.expires, cookie.path)
    return respond
Ejemplo n.º 5
0
def details_url_annotate(
        queryset, app_path=None,
        model_path=None, param_name=None, param_lookup_name=None):
    """
    annotate each row in a `queryset` with an admin link to related records

    For example, we have hosts and host events linked via a
    :class:`Django Foreign Key <django.db.models.ForeignKey>` field. This
    function will annotate each row in the hosts `queryset` with the
    absolute `Django admin URL` to the related host events like below::

        http://10.2.50.35:8080/admin/citrus_borg/winlogevent/?source_host__host_name=bccss-t450s-02

    :arg queryset: the initial :class:`Django queryset
        <django.db.models.query.QuerySet>`

    :arg str app_path: the `app_label` for the model with the details.
        If ``None``, the `app_label` is picked from the `queryset` using the
        `Model _meta API
        <https://docs.djangoproject.com/en/2.2/ref/models/meta/#module-django.db.models.options>`_.
        This is only useful if the master and the details models are defined in
        the same `Django application`.

    :arg str model_path: the `model_name` property of the `Django model` with
        the details. If ``None``, it will be picked from the `queryset` using
        the `Model _meta API`. This is of very little use, since there are
        very few realistic data models where the master and the details are in
        the same `Django model`.

    :arg str param_name: `Django lookup` key_name__field_name used to build the
        `param` part of the `URL`. If one considers the example above, this
        argument is 'source_host__host_name' which means that the events
        (the details) model has a foreign key named 'source_host' pointing to
        the hosts (master) model and the lookup will be applied against a
        field named 'host_name' in the later model.

    :arg str param_lookup_name: the name of the field that contains the `value`
        part of the `URL`. If one considers the example above, the value used
        in the filter is picked from the field containing the host_name.

    :raises TypeError: if `queryset` is not a Django ``QuerySet``
    :raises ValueError: if `param_name` or `param_lookup_name` is ``None``
    """
    if not isinstance(queryset, QuerySet):
        raise TypeError(
            'bad type %s for object %s' % (type(queryset), queryset))

    if param_name is None:
        raise ValueError(
            'cannot build URL parameters without a parameter name')

    if param_lookup_name is None:
        raise ValueError(
            'cannot build URL parameters without a parameter value')

    obj_sample = queryset.model._meta

    # Default app/model to the queryset's own model when not given.
    if app_path is None:
        app_path = obj_sample.app_label

    if model_path is None:
        model_path = obj_sample.model_name

    # The original wrapped this in `try: ... except Exception as error:
    # raise error`, which is a no-op; let exceptions propagate naturally.
    return queryset.annotate(details_url=Concat(
        Value(settings.SERVER_PROTO), Value('://'),
        Value(socket.getfqdn()),
        Value(':'), Value(settings.SERVER_PORT),
        Value('/admin/'),
        Value(app_path), Value('/'), Value(model_path),
        Value('/?'), Value(param_name), Value('='), F(param_lookup_name),
        output_field=URLField()))
Ejemplo n.º 6
0
def get_group_permission_codes(group: Group) -> "QuerySet":
    """Return group permissions in the format '<app label>.<permission codename>'."""
    formatted = Concat("content_type__app_label", Value("."), "codename")
    return (
        group.permissions
        .annotate(formated_codename=formatted)
        .values_list("formated_codename", flat=True)
    )
Ejemplo n.º 7
0
def get_rune_report(qs, total_log_count):
    """Build a statistics report for a rune queryset.

    Returns a dict of occurrence tables and histograms (stars, type,
    quality, slot, main/innate/sub stats, max efficiency, sell value),
    or ``None`` when the queryset is empty.

    `total_log_count` is accepted for interface compatibility but unused
    here (presumably consumed by callers of sibling report builders —
    TODO confirm).
    """
    # Hoisted: the original re-ran qs.count() for every report section,
    # issuing a separate COUNT query each time.
    total = qs.count()
    if total == 0:
        return None

    # Substat distribution.
    # Unable to use database aggregation on an ArrayField without ORM
    # gymnastics, so post-process data in python. Materialize once so
    # Counter() and len() share a single query result.
    all_substats = list(
        qs.annotate(
            flat_substats=Func(F('substats'), function='unnest')
        ).values_list('flat_substats', flat=True))
    substat_counts = Counter(all_substats)

    # Sell value ranges, rounded out to the nearest 1000 for histogram bins.
    min_value, max_value = qs.aggregate(Min('value'), Max('value')).values()
    min_value = int(floor_to_nearest(min_value, 1000))
    max_value = int(ceil_to_nearest(max_value, 1000))

    return {
        'stars': {
            'type': 'occurrences',
            'total': total,
            'data': transform_to_dict(
                list(
                    qs.values(grade=Concat(Cast('stars', CharField()),
                                           Value('⭐'))).annotate(
                        count=Count('pk')).order_by('-count'))),
        },
        'type': {
            'type': 'occurrences',
            'total': total,
            'data': transform_to_dict(
                replace_value_with_choice(
                    list(
                        qs.values('type').annotate(
                            count=Count('pk')).order_by('-count')),
                    {'type': qs.model.TYPE_CHOICES})),
        },
        'quality': {
            'type': 'occurrences',
            'total': total,
            'data': transform_to_dict(
                replace_value_with_choice(
                    list(
                        qs.values('quality').annotate(
                            count=Count('pk')).order_by('-count')),
                    {'quality': qs.model.QUALITY_CHOICES})),
        },
        'slot': {
            'type': 'occurrences',
            'total': total,
            'data': transform_to_dict(
                list(
                    qs.values('slot').annotate(
                        count=Count('pk')).order_by('-count'))),
        },
        'main_stat': {
            'type': 'occurrences',
            'total': total,
            'data': transform_to_dict(
                replace_value_with_choice(
                    list(
                        qs.values('main_stat').annotate(
                            count=Count('main_stat')).order_by('main_stat')),
                    {'main_stat': qs.model.STAT_CHOICES}))
        },
        'innate_stat': {
            'type': 'occurrences',
            'total': total,
            'data': transform_to_dict(
                replace_value_with_choice(
                    list(
                        qs.values('innate_stat').annotate(
                            count=Count('pk')).order_by('innate_stat')),
                    {'innate_stat': qs.model.STAT_CHOICES}))
        },
        'substats': {
            'type': 'occurrences',
            'total': len(all_substats),
            'data': transform_to_dict(
                replace_value_with_choice(
                    sorted([{
                        'substat': k,
                        'count': v
                    } for k, v in substat_counts.items()],
                           key=lambda count: count['substat']),
                    {'substat': qs.model.STAT_CHOICES}), )
        },
        'max_efficiency': {
            'type': 'histogram',
            'width': 5,
            'data': histogram(qs,
                              'max_efficiency',
                              range(0, 100, 5),
                              slice_on='quality'),
        },
        'value': {
            'type': 'histogram',
            'width': 500,
            'data': histogram(qs,
                              'value',
                              range(min_value, max_value, 500),
                              slice_on='quality')
        }
    }
Ejemplo n.º 8
0
def slowquery_review_history(request):
    """Return slow-query detail history for an instance as JSON.

    POST params: instance_name, StartTime, EndTime, db_name (optional),
    SQLId (optional), limit, offset. Responds with
    ``{"total": <count>, "rows": [...]}`` or a status-1 error payload when
    the user's group is not associated with the instance.
    """
    instance_name = request.POST.get('instance_name')
    # Server-side permission check: the user must belong to a group that is
    # associated with this instance.
    try:
        user_instances(request.user, 'all').get(instance_name=instance_name)
    except Exception:
        result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
        return HttpResponse(json.dumps(result),
                            content_type='application/json')

    # Decide whether this is an Aliyun RDS instance or a regular one.
    instance_info = Instance.objects.get(instance_name=instance_name)
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        # Delegate to the Aliyun slow-log API.
        result = aliyun_rds_slowquery_review_history(request)
    else:
        start_time = request.POST.get('StartTime')
        end_time = request.POST.get('EndTime')
        db_name = request.POST.get('db_name')
        sql_id = request.POST.get('SQLId')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))

        # Make EndTime inclusive by extending it to the start of the next day.
        end_time = datetime.datetime.strptime(
            end_time, '%Y-%m-%d') + datetime.timedelta(days=1)
        limit = offset + limit

        # The original had three near-identical branches differing only in
        # the filter; build the filter kwargs once instead. SQLId and DBName
        # are optional, with SQLId taking precedence (as before).
        query_filters = {
            'hostname_max': (instance_info.host + ':' +
                             str(instance_info.port)),
            'ts_min__range': (start_time, end_time),
        }
        if sql_id:
            query_filters['checksum'] = sql_id
        elif db_name:
            query_filters['db_max'] = db_name

        slowsql_record_obj = SlowQueryHistory.objects.filter(
            **query_filters
        ).annotate(
            # Earliest occurrence of this statement within the 5-minute
            # aggregation bucket.
            ExecutionStartTime=F('ts_min'),
            DBName=F('db_max'),  # database name
            # User shown as 'user'@'client'.
            HostAddress=Concat(V('\''), 'user_max', V('\''), V('@'),
                               V('\''), 'client_max', V('\'')),
            SQLText=F('sample'),  # sample SQL statement
            TotalExecutionCounts=F('ts_cnt'),  # occurrences in this bucket
            QueryTimePct95=F('query_time_pct_95'),  # 95th percentile time
            QueryTimes=F('query_time_sum'),  # total query time (seconds)
            LockTimes=F('lock_time_sum'),  # total lock time (seconds)
            ParseRowCounts=F('rows_examined_sum'),  # total rows examined
            ReturnRowCounts=F('rows_sent_sum')  # total rows sent
        ).values('ExecutionStartTime', 'DBName', 'HostAddress',
                 'SQLText', 'TotalExecutionCounts', 'QueryTimePct95',
                 'QueryTimes', 'LockTimes', 'ParseRowCounts',
                 'ReturnRowCounts')[offset:limit]

        slowsql_obj_count = SlowQueryHistory.objects.filter(
            **query_filters).count()

        # Serialize the QuerySet for the JSON response.
        sql_slow_record = [slow_record for slow_record in slowsql_record_obj]
        result = {"total": slowsql_obj_count, "rows": sql_slow_record}

    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Ejemplo n.º 9
0
 def get_display(self):
     """Annotate every row with ``display_name`` = "client_name (gst)",
     ordered by client name."""
     display_expr = Concat('client_name', V(' ('), 'gst', V(')'))
     annotated = self.annotate(display_name=display_expr)
     return annotated.all().order_by('client_name')
Ejemplo n.º 10
0
class EC2InstanceAdmin(admin.ModelAdmin, CSVExporter):
    """Admin for :class:`EC2Instance` with CSV export and a set of
    operational actions (start/stop/terminate, Raspberry Pi management,
    instance upgrades and status checks)."""

    csv_fields = (
        'id',
        'hostname',
        'instance_type',
        'browser_type',
        'lead',
        'lead_status',
        'raspberry_pi',
        'version',
        'raspberry_pi_version',
        'status',
        'last_rdp_session',
        'last_seen',
        'last_troubleshoot_field',
        'tunnel_up_date_field',
        'raspberry_pi_online',
    )

    csv_titles = (
        'Id',
        'Hostname',
        'Instance Type',
        'Browser Type',
        'Lead',
        'Lead Status',
        'Raspberry Pi',
        'Version',
        'Raspberry Pi Version',
        'Status',
        'Last Rdp Session',
        'Last Seen',
        'Last Troubleshoot Field',
        'Tunnel Up Date Field',
        'Raspberry Pi Online',
    )

    class Media:
        css = {
            'all': ('css/custom_admin.css',)
        }

    model = EC2Instance
    list_display = (
        'id',
        'hostname',
        'instance_type',
        'browser_type',
        'lead_link',
        'lead_status',
        'raspberry_pi_link',
        'version',
        'raspberry_pi_version',
        'status',
        'last_rdp_session',
        'last_seen',
        'last_troubleshoot_field',
        'tunnel_up_date_field',
        'links',
        'raspberry_pi_online',
    )
    list_filter = (
        'status',
        'instance_type',
        'browser_type',
        'is_essential',
        TunnelUpListFilter,
        LeadStatusListFilter,
        LeadRaspberryPiOnlineListFilter,
        LeadRaspberryPiVersionListFilter,
        LastTroubleshootListFilter,
    )
    readonly_fields = ('created', 'updated', )
    search_fields = ('instance_id', 'email', 'rpid', 'lead__leadid', 'essential_key', )
    list_select_related = ('lead', 'lead__raspberry_pi', )
    actions = (
        'update_ec2_tags',
        'get_currect_state',
        'start',
        'stop',
        'restart_raspberry_pi',
        'clear_ping_cache',
        'terminate',
        'update_password',
        'upgrade_to_large',
        'launch_essential_ec2',
        'check_status',
        'export_as_csv',
    )
    raw_id_fields = ('lead', )

    def lead_link(self, obj):
        """Link to the lead changelist filtered by this instance's lead."""
        if obj.lead is None:
            return obj.email
        return mark_safe('<a href="{url}?leadid={q}">{lead}</a>'.format(
            url=reverse('admin:adsrental_lead_changelist'),
            lead=obj.lead.email,
            q=obj.lead.leadid,
        ))

    def lead_status(self, obj):
        """Status of the attached lead, or a falsy value if none."""
        return obj.lead and obj.lead.status

    def raspberry_pi_link(self, obj):
        """Link to the RaspberryPi changelist filtered by this RPID."""
        if obj.lead is None or obj.lead.raspberry_pi is None:
            return obj.rpid
        return mark_safe('<a href="{url}?rpid={q}">{rpid}</a>'.format(
            url=reverse('admin:adsrental_raspberrypi_changelist'),
            rpid=obj.lead.raspberry_pi.rpid,
            q=obj.lead.raspberry_pi.rpid,
        ))

    def raspberry_pi_online(self, obj):
        """Whether the attached Raspberry Pi is currently online."""
        return obj.lead and obj.lead.raspberry_pi and obj.lead.raspberry_pi.online()

    def raspberry_pi_version(self, obj):
        """Firmware version of the attached Raspberry Pi, if any."""
        return obj.lead and obj.lead.raspberry_pi and obj.lead.raspberry_pi.version

    def last_seen(self, obj):
        """Human-friendly 'last seen' timestamp for the Raspberry Pi."""
        raspberry_pi = obj.get_raspberry_pi()
        if not raspberry_pi:
            return None

        last_seen = raspberry_pi.get_last_seen()
        if not last_seen:
            return None

        return mark_safe(u'<span title="{}">{}</span>'.format(last_seen, naturaltime(last_seen)))

    def last_rdp_session(self, obj):
        """Human-friendly timestamp of the last RDP session."""
        if not obj.last_rdp_start:
            return None

        date = obj.last_rdp_start
        return mark_safe(u'<span title="{}">{}</span>'.format(date, naturaltime(date)))

    def last_troubleshoot_field(self, obj):
        """Human-friendly timestamp of the last troubleshoot, or 'Never'."""
        if obj.last_troubleshoot is None:
            return 'Never'

        date = obj.last_troubleshoot
        return mark_safe(u'<span title="{}">{}</span>'.format(date, naturaltime(date)))

    def tunnel_up_date_field(self, obj):
        """Tunnel status: 'Yes' while up, otherwise when it was last up."""
        if obj.tunnel_up_date is None:
            return 'Never'

        date = obj.tunnel_up_date
        is_tunnel_up = obj.is_tunnel_up()
        return mark_safe(u'<span title="{}">{}</span>'.format(
            date,
            'Yes' if is_tunnel_up else naturaltime(date),
        ))

    def links(self, obj):
        """Quick-access links (RDP, config, logs, tunnels, SSH hint)."""
        links = []
        if obj.rpid:
            links.append('<a href="{url}">RDP</a>'.format(
                url=reverse('rdp_ec2_connect', kwargs=dict(rpid=obj.rpid)),
            ))
        if obj.lead:
            links.append('<a href="{url}">pi.conf</a>'.format(
                url=reverse('pi_config', kwargs=dict(rpid=obj.rpid)),
            ))
        if obj.lead and obj.lead.raspberry_pi:
            now = timezone.localtime(timezone.now())
            today_log_filename = '{}.log'.format(now.strftime(settings.LOG_DATE_FORMAT))
            links.append('<a href="{log_url}">Today log</a>'.format(
                log_url=reverse('show_log', kwargs={'rpid': obj.rpid, 'filename': today_log_filename}),
            ))
            links.append('<a href="{url}">Netstat</a>'.format(
                url=reverse('ec2_ssh_get_netstat', kwargs=dict(rpid=obj.rpid)),
            ))
            links.append('<a href="{url}">RTunnel</a>'.format(
                url=reverse('ec2_ssh_start_reverse_tunnel', kwargs=dict(rpid=obj.rpid)),
            ))

        links.append('<a href="#" title="ssh -o StrictHostKeyChecking=no -i ~/.ssh/farmbot Administrator@{hostname} -p 40594">Copy SSH</a>'.format(
            hostname=obj.hostname,
        ))
        return mark_safe(', '.join(links))

    def update_ec2_tags(self, request, queryset):
        """Action: push tags to EC2 for each selected instance."""
        for ec2_instance in queryset:
            ec2_instance.set_ec2_tags()

    def get_currect_state(self, request, queryset):
        """Action: refresh state from boto (bulk-fetch when >10 selected)."""
        if queryset.count() > 10:
            boto_resource = BotoResource()
            for ec2_boto in boto_resource.get_resource('ec2').instances.all():
                ec2 = EC2Instance.objects.filter(instance_id=ec2_boto.id).first()
                if ec2:
                    ec2.update_from_boto(ec2_boto)

        for ec2_instance in queryset:
            ec2_instance.update_from_boto()

    def start(self, request, queryset):
        """Action: start selected instances."""
        for ec2_instance in queryset:
            ec2_instance.start()

    def stop(self, request, queryset):
        """Action: stop selected instances."""
        for ec2_instance in queryset:
            ec2_instance.stop()

    def restart_raspberry_pi(self, request, queryset):
        """Action: flag attached Raspberry Pis for restart and drop their
        ping cache so the flag takes effect promptly."""
        for ec2_instance in queryset:
            if ec2_instance.lead and ec2_instance.lead.raspberry_pi:
                ec2_instance.lead.raspberry_pi.restart_required = True
                ec2_instance.lead.raspberry_pi.save()
                PingCacheHelper().delete(ec2_instance.rpid)

    def clear_ping_cache(self, request, queryset):
        """Action: drop the ping cache entry for each selected instance."""
        for ec2_instance in queryset:
            PingCacheHelper().delete(ec2_instance.rpid)

    def terminate(self, request, queryset):
        """Action: terminate selected instances."""
        for ec2_instance in queryset:
            ec2_instance.terminate()

    def update_password(self, request, queryset):
        """Action: re-apply each instance's stored password."""
        for ec2_instance in queryset:
            ec2_instance.change_password(ec2_instance.password)

    def upgrade_to_large(self, request, queryset):
        """Action: upgrade stopped instances to the M5 Large type."""
        client = BotoResource().get_client('ec2')
        for ec2_instance in queryset:
            if ec2_instance.instance_type == EC2Instance.INSTANCE_TYPE_M5_LARGE:
                messages.warning(request, 'EC2 was already upgraded')
                continue
            ec2_instance.update_from_boto()
            if ec2_instance.status != EC2Instance.STATUS_STOPPED:
                # Bug fix: this failure path was reported with
                # messages.success; use warning like the branch above.
                messages.warning(request, 'EC2 should be stopped first')
                continue

            client.modify_instance_attribute(InstanceId=ec2_instance.instance_id, Attribute='instanceType', Value=EC2Instance.INSTANCE_TYPE_M5_LARGE)
            ec2_instance.instance_type = EC2Instance.INSTANCE_TYPE_M5_LARGE
            ec2_instance.save()
            messages.success(request, 'EC2 is upgraded successfully')

    def launch_essential_ec2(self, request, queryset):
        """Action: launch a new essential EC2 (ignores the selection)."""
        ec2_instance = EC2Instance.launch_essential()
        messages.success(request, 'Essential EC2 {} is started successfully'.format(ec2_instance.essential_key))

    def check_status(self, request, queryset):
        """Action: query AWS status; stop and unassign impaired instances."""
        for ec2 in queryset:
            if not ec2.lead:
                continue

            ec2_client = BotoResource().get_client('ec2')
            statuses = ec2_client.describe_instance_status(
                InstanceIds=[ec2.instance_id],
                IncludeAllInstances=True
            )
            if statuses['InstanceStatuses'][0]['InstanceStatus']['Status'] == 'impaired':
                ec2.lead = None
                ec2.rpid = 'OLD:' + ec2.rpid
                ec2.save()
                ec2.set_ec2_tags()
                ec2.stop()
                messages.info(request, 'Essential EC2 {} status is stopped and unassigned: {}'.format(ec2.rpid, statuses))
            else:
                messages.success(request, 'Essential EC2 {} status is okay: {}'.format(ec2.rpid, statuses))

    lead_link.short_description = 'Lead'
    lead_link.admin_order_field = Concat('lead__first_name', Value(' '), 'lead__last_name')

    raspberry_pi_link.short_description = 'RaspberryPi'

    raspberry_pi_version.short_description = 'RPi Version'

    raspberry_pi_online.boolean = True
    raspberry_pi_online.short_description = 'RPi Online'

    last_troubleshoot_field.short_description = 'Troubleshoot'
    last_troubleshoot_field.admin_order_field = 'last_troubleshoot'

    tunnel_up_date_field.short_description = 'Tunnel up'
    tunnel_up_date_field.admin_order_field = 'tunnel_up_date'

    last_seen.admin_order_field = 'lead__raspberry_pi__last_seen'

    clear_ping_cache.short_description = 'DEBUG: Clear ping cache'

    terminate.short_description = 'DEBUG: Terminate'

    upgrade_to_large.short_description = 'DEBUG: Upgrade to M5 Large instance'

    launch_essential_ec2.short_description = 'DEBUG: Launch essential EC2'

    check_status.short_description = 'DEBUG: Check status'
Ejemplo n.º 11
0
def award_financial_derivations(derived_fields):
    """Add award-level derived columns to *derived_fields* and return it.

    Most columns coalesce a contract-data field with its assistance-data
    equivalent (an award is one or the other, so exactly one side is
    populated).  Dict insertion order is preserved deliberately, since it
    drives downstream column ordering.
    """
    contract = "award__latest_transaction__contract_data__"
    assistance = "award__latest_transaction__assistance_data__"

    def coalesced(contract_field, assistance_field):
        # Prefer the contract-side value, fall back to the assistance side.
        return Coalesce(contract + contract_field, assistance + assistance_field)

    # (output column, contract field, assistance field) triples, in the
    # exact order the columns must appear.
    first_group = (
        ("award_type_code", "contract_award_type", "assistance_type"),
        ("award_type", "contract_award_type_desc", "assistance_type_desc"),
        ("awarding_agency_code", "awarding_agency_code", "awarding_agency_code"),
        ("awarding_agency_name", "awarding_agency_name", "awarding_agency_name"),
        ("awarding_subagency_code", "awarding_sub_tier_agency_c", "awarding_sub_tier_agency_c"),
        ("awarding_subagency_name", "awarding_sub_tier_agency_n", "awarding_sub_tier_agency_n"),
        ("awarding_office_code", "awarding_office_code", "awarding_office_code"),
        ("awarding_office_name", "awarding_office_name", "awarding_office_name"),
        ("funding_agency_code", "funding_agency_code", "funding_agency_code"),
        ("funding_agency_name", "funding_agency_name", "funding_agency_name"),
        ("funding_sub_agency_code", "funding_sub_tier_agency_co", "funding_sub_tier_agency_co"),
        ("funding_sub_agency_name", "funding_sub_tier_agency_na", "funding_sub_tier_agency_na"),
        ("funding_office_code", "funding_office_code", "funding_office_code"),
        ("funding_office_name", "funding_office_name", "funding_office_name"),
        ("recipient_duns", "awardee_or_recipient_uniqu", "awardee_or_recipient_uniqu"),
        ("recipient_name", "awardee_or_recipient_legal", "awardee_or_recipient_legal"),
        ("recipient_parent_duns", "ultimate_parent_unique_ide", "ultimate_parent_unique_ide"),
        ("recipient_parent_name", "ultimate_parent_legal_enti", "ultimate_parent_legal_enti"),
        ("recipient_country", "legal_entity_country_code", "legal_entity_country_code"),
        ("recipient_state", "legal_entity_state_code", "legal_entity_state_code"),
        ("recipient_county", "legal_entity_county_name", "legal_entity_county_name"),
        ("recipient_city", "legal_entity_city_name", "legal_entity_city_name"),
        ("recipient_congressional_district", "legal_entity_congressional", "legal_entity_congressional"),
    )
    for column, contract_field, assistance_field in first_group:
        derived_fields[column] = coalesced(contract_field, assistance_field)

    # Contract data stores the full zip+4 in one field; assistance data
    # splits it across two fields that must be concatenated.
    derived_fields["recipient_zip_code"] = Coalesce(
        contract + "legal_entity_zip4",
        Concat(
            assistance + "legal_entity_zip5",
            assistance + "legal_entity_zip_last4",
        ),
    )

    second_group = (
        ("primary_place_of_performance_country", "place_of_perf_country_desc", "place_of_perform_country_n"),
        ("primary_place_of_performance_state", "place_of_perfor_state_desc", "place_of_perform_state_nam"),
        ("primary_place_of_performance_county", "place_of_perform_county_na", "place_of_perform_county_na"),
        ("primary_place_of_performance_congressional_district", "place_of_performance_congr", "place_of_performance_congr"),
        ("primary_place_of_performance_zip_code", "place_of_performance_zip4a", "place_of_performance_zip4a"),
    )
    for column, contract_field, assistance_field in second_group:
        derived_fields[column] = coalesced(contract_field, assistance_field)

    # Fiscal years derived from the award's own date columns.
    derived_fields["award_base_action_date_fiscal_year"] = FiscalYear(
        "award__date_signed")
    derived_fields["award_latest_action_date_fiscal_year"] = FiscalYear(
        "award__certified_date")

    # Permalink only exists when the award has a generated unique id;
    # the id is URL-encoded in the database.
    derived_fields["usaspending_permalink"] = Case(
        When(
            award__generated_unique_award_id__isnull=False,
            then=ConcatAll(
                Value(AWARD_URL),
                Func(F("award__generated_unique_award_id"),
                     function="urlencode"),
                Value("/"),
            ),
        ),
        default=Value(""),
        output_field=CharField(),
    )

    return derived_fields
Ejemplo n.º 12
0
def slowquery_review_history(request):
    """Return slow-query execution history as JSON.

    Aliyun RDS clusters are served through the Aliyun slow-log API;
    self-managed instances are read from ``SlowQueryHistory``, filtered
    by time range and, optionally, by SQL checksum (``SQLId``) or
    database name (``db_name``).  The response is a bootstrap-table
    payload: ``{"total": <count>, "rows": [...]}``.
    """
    cluster_name = request.POST.get('cluster_name')

    cluster_info = master_config.objects.get(cluster_name=cluster_name)
    # RDS or self-managed?  (.exists() avoids materializing the queryset
    # just to measure its length.)
    if AliyunRdsConfig.objects.filter(cluster_name=cluster_name).exists():
        if settings.ALIYUN_RDS_MANAGE:
            # Delegate to the Aliyun slow-log API.
            result = aliyun_rds_slowquery_review_history(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        SQLId = request.POST.get('SQLId')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))

        # EndTime is an inclusive date: extend by one day so the range
        # covers the whole final day.
        EndTime = datetime.datetime.strptime(
            EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)
        limit = offset + limit

        # SQLId and DBName are optional; build one filter dict instead of
        # duplicating the whole query three times.
        filters = {
            'hostname_max': '{}:{}'.format(cluster_info.master_host,
                                           cluster_info.master_port),
            'ts_min__range': (StartTime, EndTime),
        }
        if SQLId:
            filters['checksum'] = SQLId
        elif DBName:
            filters['db_max'] = DBName
        base_queryset = SlowQueryHistory.objects.filter(**filters)

        # With a checksum or db filter the user is rendered as
        # 'user'@'client'; otherwise only user_max is shown (matches the
        # original three-branch behaviour).
        if SQLId or DBName:
            host_address = Concat(V('\''), 'user_max', V('\''), V('@'),
                                  V('\''), 'client_max', V('\''))
        else:
            host_address = F('user_max')

        slowsql_record_obj = base_queryset.annotate(
            ExecutionStartTime=F('ts_min'),  # execution start time
            DBName=F('db_max'),  # database name
            HostAddress=host_address,  # user name (and client host)
            SQLText=F('sample'),  # SQL statement
            QueryTimes=F('query_time_sum'),  # execution time (seconds)
            LockTimes=F('lock_time_sum'),  # lock time (seconds)
            ParseRowCounts=F('rows_examined_sum'),  # rows examined
            ReturnRowCounts=F('rows_sent_sum')  # rows returned
        ).values('ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText',
                 'QueryTimes', 'LockTimes', 'ParseRowCounts',
                 'ReturnRowCounts')[offset:limit]

        slowsql_obj_count = base_queryset.count()

        # Serialize the QuerySet.
        SQLSlowRecord = [SlowRecord for SlowRecord in slowsql_record_obj]
        result = {"total": slowsql_obj_count, "rows": SQLSlowRecord}

        # Return the query result.
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Ejemplo n.º 13
0
    def get_queryset(self):
        """Return checks annotated with status text and borrower display
        name, narrowed by any of the supported filter query parameters."""
        qs = Check.objects.annotate(
            checkStatusText=F("checkStatus__name"),
            borrowerName=Case(
                # Business borrowers ("BD") display their trade name...
                When(
                    Q(loan__borrower__recordType="BD"),
                    then=F("loan__borrower__business__tradeName"),
                ),
                # ...individuals ("ID") display "first middle last".
                When(
                    Q(loan__borrower__recordType="ID"),
                    then=Concat(
                        F("loan__borrower__individual__firstname"),
                        V(" "),
                        F("loan__borrower__individual__middlename"),
                        V(" "),
                        F("loan__borrower__individual__lastname"),
                    ),
                ),
            ),
        ).order_by("id")

        params = self.request.query_params

        # Exact-match filters: query parameter name -> ORM lookup.
        for param, lookup in (
            ("checkId", "id"),
            ("borrowerId", "loan__borrower"),
            ("pnNo", "pnNo"),
            ("bankBranch", "bankBranch"),
            ("checkNo", "checkNo"),
            ("accountNo", "accountNo"),
            ("checkStatus", "checkStatus"),
        ):
            value = params.get(param)
            if value is not None:
                qs = qs.filter(**{lookup: value})

        # Range filters: applied only when both bounds are supplied.
        for lo_param, hi_param, lookup in (
            ("dateReceivedFrom", "dateReceivedTo", "dateReceived__date"),
            ("checkDateFrom", "checkDateTo", "checkDate__date"),
            ("amountFrom", "amountTo", "amount"),
        ):
            lo = params.get(lo_param)
            hi = params.get(hi_param)
            if lo is not None and hi is not None:
                qs = qs.filter(**{lookup + "__gte": lo}).filter(
                    **{lookup + "__lte": hi})

        return qs
Ejemplo n.º 14
0
    def get_queryset(self):
        """Return payments annotated with borrower display name and PN
        number, narrowed by the supported filter query parameters, with a
        computed ``paidInterest`` attached to each cached instance."""
        queryset = Payment.objects.annotate(
            # Display name depends on borrower record type: trade name for
            # businesses ("BD"), "first middle last" for individuals ("ID").
            borrowerName=Case(
                When(Q(loan__borrower__recordType="BD"),
                     then=F("loan__borrower__business__tradeName")),
                When(
                    Q(loan__borrower__recordType="ID"),
                    then=Concat(
                        F("loan__borrower__individual__firstname"),
                        V(" "),
                        F("loan__borrower__individual__middlename"),
                        V(" "),
                        F("loan__borrower__individual__lastname"),
                    ),
                ),
            ),
            pnNo=F("loan__pnNo"),
        ).order_by("id")
        # All filter parameters are optional.
        paymentId = self.request.query_params.get("paymentId", None)

        borrowerId = self.request.query_params.get("borrowerId", None)
        principalFrom = self.request.query_params.get("principalFrom", None)
        principalTo = self.request.query_params.get("principalTo", None)

        interestFrom = self.request.query_params.get("interestFrom", None)
        interestTo = self.request.query_params.get("interestTo", None)
        accruedInterestFrom = self.request.query_params.get(
            "accruedInterestFrom", None)
        accruedInterestTo = self.request.query_params.get(
            "accruedInterestTo", None)

        paymentDateFrom = self.request.query_params.get(
            "paymentDateFrom", None)
        paymentDateTo = self.request.query_params.get("paymentDateTo", None)
        totalPaymentFrom = self.request.query_params.get(
            "totalPaymentFrom", None)
        totalPaymentTo = self.request.query_params.get("totalPaymentTo", None)
        paymentType = self.request.query_params.get("paymentType", None)
        status = self.request.query_params.get("status", None)

        if paymentId is not None:
            queryset = queryset.filter(id=paymentId)

        # NOTE(review): this filters on borrower__borrowerId while the
        # sibling Check view filters on loan__borrower — confirm against the
        # Payment model that both lookups are intentional.
        if borrowerId is not None:
            queryset = queryset.filter(borrower__borrowerId=borrowerId)

        # Range filters apply only when both bounds are present.
        if principalFrom is not None and principalTo is not None:
            queryset = queryset.filter(principal__gte=principalFrom).filter(
                principal__lte=principalTo)

        if interestFrom is not None and interestTo is not None:
            queryset = queryset.filter(
                interestPayment__gte=interestFrom).filter(
                    interestPayment__lte=interestTo)

        if accruedInterestFrom is not None and accruedInterestTo is not None:
            queryset = queryset.filter(
                accruedInterestPayment__gte=accruedInterestFrom).filter(
                    accruedInterestPayment__lte=accruedInterestTo)

        if paymentDateFrom is not None and paymentDateTo is not None:
            queryset = queryset.filter(
                datePayment__date__gte=paymentDateFrom).filter(
                    datePayment__date__lte=paymentDateTo)

        if totalPaymentFrom is not None and totalPaymentTo is not None:
            queryset = queryset.filter(total__gte=totalPaymentFrom).filter(
                total__lte=totalPaymentTo)

        if paymentType is not None:
            queryset = queryset.filter(paymentType__name=paymentType)

        if status is not None:
            queryset = queryset.filter(paymentStatus__name=status)

        # NOTE(review): this loop evaluates the queryset and sets
        # paidInterest on the instances held in the queryset's result
        # cache; consumers see the attribute only because they iterate the
        # same (now-cached) queryset.  Any further .filter()/clone by a
        # caller would re-query and lose it — an annotation with F()
        # expressions would be sturdier, but the fields' nullability is not
        # visible here, so the behavior is left as-is.
        for payment in queryset:
            payment.paidInterest = payment.interestPayment + payment.accruedInterestPayment + payment.additionalInterest

        return queryset
Ejemplo n.º 15
0
 def get_queryset(self):
     """Return units ordered by a composite display name made of organ
     name, commission acronym and parliamentarian name."""
     composite_name = Concat('orgao__nome', 'comissao__sigla',
                             'parlamentar__nome_parlamentar')
     base = super().get_queryset()
     return base.annotate(nome_composto=composite_name).order_by(
         'nome_composto')
Ejemplo n.º 16
0
    def get_data(self, request, purchase_order_id=None):
        """Serialize one purchase order (header plus its lines) into a
        plain dict suitable for rendering; empty dict if no order matches."""
        order_lines = POMaker.PurchaseOrderLine.objects.annotate(
            product_name=F('product__name'))

        # Database-side date formatting (Postgres TO_CHAR).
        po_date_expr = ExpressionWrapper(
            Func(F('date'), Value("DD/MM/YYYY"), function='TO_CHAR'),
            output_field=CharField())

        # Comma-joined postal address for the buying society.
        society_address_expr = Concat(
            F('society__partner__street1'), V(','),
            F('society__partner__street2'), V(','),
            F('society__partner__city'), V(','),
            F('society__partner__state__name'), V(','),
            F('society__partner__country__name'), V(','),
            F('society__partner__zip_code'),
            output_field=CharField())

        # Vendor address, with line breaks and contact details appended.
        vendor_address_expr = Concat(
            F('vendor__street1'), V(','),
            F('vendor__street2'), V(',\n '),
            F('vendor__city'), V(','),
            F('vendor__zip_code'), V(',\n '),
            F('vendor__state__name'), V(','),
            F('vendor__country__name'), V(',\n '),
            F('vendor__mobile_no'), V(',\n '),
            F('vendor__email'),
            output_field=CharField())

        record = POMaker.PurchaseOrderMaker.objects.filter(
            pk=purchase_order_id
        ).annotate(
            po_date=po_date_expr,
            society_name=F('society__name'),
            society_address=society_address_expr,
            vendor_name=F('vendor__name'),
            vendor_address=vendor_address_expr,
        ).prefetch_related(
            Prefetch('purchaseorderline_set',
                     queryset=order_lines,
                     to_attr='purchase_order_lines'))

        data = {}
        for order in record:
            data = {
                'society_name': order.society_name,
                'society_address': order.society_address,
                'vendor_name': order.vendor_name,
                'vendor_address': order.vendor_address,
                'purchase_order_no': order.purchase_order_no,
                'po_date': order.po_date,
                'grand_total': order.grand_total,
                'special_instructions': order.special_instructions,
                'pk': order.pk,
                'purchase_order_lines': [{
                    'product_name': line.product_name,
                    'quantity': line.quantity,
                    'product_unit_price': line.product_unit_price,
                } for line in order.purchase_order_lines],
            }
        return data
Ejemplo n.º 17
0
 def resolve_all_reporters(self, info, **args):
     """Return every reporter, annotated with a ``full_name`` text field
     built from first and last name."""
     full_name = Concat("first_name", Value(" "), "last_name",
                        output_field=TextField())
     return Reporter.objects.annotate(full_name=full_name)
Ejemplo n.º 18
0
 def id_field(self):
     """Return the id-field mapping: ``ledger_reference`` is the
     case/section/entry ids joined with '/'."""
     sep = Value('/')
     reference = Concat('case_id', sep, 'section_id', sep, 'entry_id')
     return {'ledger_reference': reference}
Ejemplo n.º 19
0
 def get_name_email(self, queryset, name, value):
     """Keep rows whose annotated full name or email contains *value*
     (case-insensitive)."""
     annotated = queryset.annotate(
         name=Concat('first_name', Value(' '), 'last_name'))
     match = Q(name__icontains=value) | Q(email__icontains=value)
     return annotated.filter(match)
Ejemplo n.º 20
0
def find_student(request):
    """Look up a student by MINED code and return the student, the
    current-year matriculation and the notes as JSON.

    Any missing piece is returned as ``None`` (``notes`` is an empty list
    when the student is matriculated but has no notes) instead of raising,
    so the frontend can react gracefully.
    """
    codigo_mined = request.POST.get('codigo_mined')
    current_year = get_current_year()

    student = None
    matriculation = None
    notes = None

    # .first() replaces the exists()+get() pair: one query instead of two,
    # and no MultipleObjectsReturned on duplicate codes.
    student_record = Student.objects.filter(code_mined=codigo_mined).annotate(
        nombre_completo=Concat('names', Value(' '), 'last_name')).first()

    if student_record is not None:
        student = {
            'id': student_record.id,
            'nombre_completo': student_record.nombre_completo,
            'genero': student_record.gender.name,
            'nacionalidad': student_record.nationality.name,
            'codigo_mined': student_record.code_mined,
            'fecha_nacimiento': student_record.birthday,
            'edad': student_record.calculate_age()
        }

        # .first() instead of .get(): a student without a matriculation for
        # the current year previously crashed with DoesNotExist.
        matriculation_record = Matriculation.objects.filter(
            teaching_year=current_year,
            student_id=student['id']).first()
        if matriculation_record is not None:
            matriculation = {
                'id': matriculation_record.id,
                'grado_seccion': str(matriculation_record.grade_section),
                'anio_curso': matriculation_record.teaching_year,
                'fecha_registro': matriculation_record.registration_date,
            }

            # Empty list (not None) when the student has no notes yet,
            # matching the previous behavior.
            notes = [{
                'Asignatura':
                note.course.name,
                'Docente':
                '{} {}'.format(note.teacher.first_name,
                               note.teacher.last_name),
                'Bimensual I':
                note.bimonthly_I or 0,
                'Bimensual II':
                note.bimonthly_II or 0,
                'Semestral I':
                note.biannual_I or 0,
                'Bimensual III':
                note.bimonthly_III or 0,
                'Bimensual IV':
                note.bimonthly_IV or 0,
                'Semestral II':
                note.biannual_II or 0,
                'Nota Final':
                note.final or 0
            } for note in Note.objects.filter(
                matriculation_id=matriculation['id']).iterator()]

    response = {
        'student': student,
        'matriculation': matriculation,
        'notes': notes,
        'status': True
    }

    return JsonResponse(response)
Ejemplo n.º 21
0
def get_report_summary(drops, total_log_count):
    """Build chart and table summary data for a set of logged drops.

    ``drops`` maps a drop-type related name to a queryset of that drop
    model; ``total_log_count`` is the number of logs the drops came from,
    used to derive per-run drop chances.  Returns
    ``{'table': {drop_type: ...}, 'chart': [...]}``.
    """
    summary = {
        'table': {},
        'chart': [],
    }

    # Chart data: list of {'drop': <string>, 'count': <int>}
    # Table data: dict (by drop type) of lists of items which drop, with stats. 'count' is only required stat.
    # NOTE(review): the drop_chance/avg_per_run expressions divide by
    # total_log_count inside the query — confirm callers never pass 0.
    for drop_type, qs in drops.items():
        if drop_type == models.ItemDrop.RELATED_NAME:
            # Chart excludes currency
            chart_data = list(
                qs.exclude(item__category=GameItem.CATEGORY_CURRENCY, ).values(
                    name=F('item__name'), ).annotate(
                        count=Count('pk'), ).filter(
                            count__gt=0).order_by('-count'))

            # Table keeps currency and adds quantity stats per item.
            table_data = list(
                qs.values(
                    name=F('item__name'),
                    icon=F('item__icon'),
                ).annotate(
                    count=Count('pk'),
                    min=Min('quantity'),
                    max=Max('quantity'),
                    avg=Avg('quantity'),
                    drop_chance=Cast(Count('pk'), FloatField()) /
                    total_log_count * 100,
                    avg_per_run=Cast(Sum('quantity'), FloatField()) /
                    total_log_count,
                ).filter(count__gt=0).order_by('item__category', '-count'))
        elif drop_type == models.MonsterDrop.RELATED_NAME:
            # Monster drops in chart are counted by stars
            chart_data = list(
                qs.values(name=Concat(Cast('grade', CharField()),
                                      Value('⭐ Monster'))).annotate(
                                          count=Count('pk')).filter(
                                              count__gt=0).order_by('-count'))

            # Element codes are replaced with their display choices.
            table_data = replace_value_with_choice(
                list(
                    qs.values(
                        name=F('monster__name'),
                        slug=F('monster__bestiary_slug'),
                        icon=F('monster__image_filename'),
                        element=F('monster__element'),
                        can_awaken=F('monster__can_awaken'),
                        is_awakened=F('monster__is_awakened'),
                        stars=F('monster__base_stars'),
                    ).annotate(
                        count=Count('pk'),
                        drop_chance=Cast(Count('pk'), FloatField()) /
                        total_log_count * 100,
                    )), {'element': Monster.ELEMENT_CHOICES})
        else:
            # Chart can is name, count only
            # Derive a human-readable label from the related name, e.g.
            # 'rune_crafts' -> 'Rune Craft'.
            item_name = ' '.join(
                [s.capitalize() for s in drop_type.split('_')]).rstrip('s')
            count = qs.aggregate(count=Count('pk'))['count']
            if count > 0:
                chart_data = [{
                    'name': item_name,
                    'count': count,
                }]
            else:
                chart_data = []

            # Table data based on item type
            if drop_type == models.MonsterPieceDrop.RELATED_NAME:
                table_data = replace_value_with_choice(
                    list(
                        qs.values(
                            name=F('monster__name'),
                            icon=F('monster__image_filename'),
                            element=F('monster__element'),
                            count=Count('pk'),
                            min=Min('quantity'),
                            max=Max('quantity'),
                            avg=Avg('quantity'),
                        )), {'element': Monster.ELEMENT_CHOICES})
            elif drop_type == models.RuneDrop.RELATED_NAME:
                # Runes are summarized three ways: by set, slot and quality.
                table_data = {
                    'sets':
                    replace_value_with_choice(
                        list(
                            qs.values('type').annotate(
                                count=Count('pk')).order_by('type')),
                        {'type': Rune.TYPE_CHOICES}),
                    'slots':
                    list(
                        qs.values('slot').annotate(
                            count=Count('pk')).order_by('slot')),
                    'quality':
                    replace_value_with_choice(
                        list(
                            qs.values('quality').annotate(
                                count=Count('pk')).order_by('quality')),
                        {'quality': Rune.QUALITY_CHOICES}),
                }
            elif drop_type == models.RuneCraftDrop.RELATED_NAME:
                table_data = replace_value_with_choice(
                    list(
                        qs.values('type', 'rune', 'quality').annotate(
                            count=Count('pk'), ).order_by(
                                'type', 'rune', 'quality')), {
                                    'type': Rune.TYPE_CHOICES,
                                    'quality': Rune.QUALITY_CHOICES
                                })
            elif drop_type == models.DungeonSecretDungeonDrop.RELATED_NAME:
                table_data = replace_value_with_choice(
                    list(
                        qs.values(
                            name=F(
                                'level__dungeon__secretdungeon__monster__name'
                            ),
                            element=
                            F('level__dungeon__secretdungeon__monster__element'
                              ),
                            icon=
                            F('level__dungeon__secretdungeon__monster__image_filename'
                              ),
                        ).annotate(count=Count('pk'), )),
                    {'element': Monster.ELEMENT_CHOICES})

            else:
                # Fail loudly for drop types nobody has wired up yet.
                raise NotImplementedError(
                    f"No summary table generation for {drop_type}")

        summary['chart'] += chart_data

        if table_data:
            summary['table'][drop_type] = table_data

    return summary
Ejemplo n.º 22
0
def get_products_data(
    queryset: "QuerySet",
    export_fields: Set[str],
    attribute_ids: Optional[List[int]],
    warehouse_ids: Optional[List[int]],
    channel_ids: Optional[List[int]],
) -> List[Dict[str, Union[str, bool]]]:
    """Create data list of products and their variants with fields values.

    It return list with product and variant data which can be used as import to
    csv writer and list of attribute and warehouse headers.
    """
    # Restrict the requested fields to plain product fields, and always fetch
    # the variant id so each row can be joined with its variant relations.
    plain_product_fields = set(
        ProductExportFields.HEADERS_TO_FIELDS_MAPPING["fields"].values())
    fields_to_fetch = export_fields & plain_product_fields
    fields_to_fetch.add("variants__id")

    rows = (
        queryset.annotate(
            product_weight=Case(
                When(weight__isnull=False, then=Concat("weight", V(" g"))),
                default=V(""),
                output_field=CharField(),
            ),
            variant_weight=Case(
                When(
                    variants__weight__isnull=False,
                    then=Concat("variants__weight", V(" g")),
                ),
                default=V(""),
                output_field=CharField(),
            ),
            description_as_str=Cast("description", CharField()),
        )
        .order_by("pk", "variants__pk")
        .values(*fields_to_fetch)
        .distinct("pk", "variants__pk")
    )

    # Relation data (attributes, channels, warehouses) keyed by product pk
    # and by variant pk respectively.
    relations_by_product = get_products_relations_data(
        queryset, export_fields, attribute_ids, channel_ids)
    relations_by_variant = get_variants_relations_data(
        queryset, export_fields, attribute_ids, warehouse_ids, channel_ids)

    products_with_variants_data: List[Dict[str, Union[str, bool]]] = []
    for row in rows:
        product_pk = row["id"]
        variant_pk = row.pop("variants__id")

        product_relations: Dict[str, str] = relations_by_product.get(
            product_pk, {})
        variant_relations: Dict[str, str] = relations_by_variant.get(
            variant_pk, {})

        row["id"] = graphene.Node.to_global_id("Product", product_pk)
        products_with_variants_data.append({
            **row,
            **product_relations,
            **variant_relations,
        })

    return products_with_variants_data
Ejemplo n.º 23
0
def list_json(request):
    """Return a JSON page of active ``Case`` rows matching POSTed filters.

    The ``condition`` POST field carries a JSON object with the optional
    keys ``page``, ``search_text``, ``order_by``, ``order_by_reverse``,
    ``all_`` and ``search_project``.  Responds with ``{'state': 1,
    'message': 'OK', 'data': {...}}`` including pagination metadata.
    """
    condition = request.POST.get('condition', '{}')
    try:
        condition = json.loads(condition)
    except json.decoder.JSONDecodeError:
        condition = dict()

    page = condition.get('page')
    size = 10  # fixed page size; the client cannot override it
    search_text = condition.get('search_text', '')
    order_by = condition.get('order_by', 'pk')
    order_by_reverse = condition.get('order_by_reverse', 'True')
    all_ = condition.get('all_', 'False')
    search_project = condition.get('search_project', None)

    page = change_to_positive_integer(page, 1)
    # Accept both real JSON booleans and their string forms: previously a
    # client sending ``true``/``false`` (not 'True'/'False') was mis-parsed.
    order_by_reverse = order_by_reverse in (True, 'True')
    all_ = all_ not in (False, 'False')
    if search_project in ('', 'None'):
        search_project = None

    q = get_query_condition(search_text)
    q &= Q(is_active=True)
    if not all_:
        # Non-"all" mode only lists the requesting user's own cases.
        q &= Q(creator=request.user)
    if search_project:
        q &= Q(project=search_project)

    objects = Case.objects.filter(q).values(
        'pk', 'uuid', 'name', 'keyword', 'project__name', 'creator',
        'creator__username',
        'modified_date').annotate(real_name=Concat('creator__last_name',
                                                   'creator__first_name',
                                                   output_field=CharField()))
    # Sort in Python because ``real_name`` and other computed keys may be
    # requested; fall back to 'pk' for unknown sort keys.
    if objects:
        if order_by not in objects[0]:
            order_by = 'pk'
        objects = sorted(objects,
                         key=lambda x: x[order_by],
                         reverse=order_by_reverse)
    paginator = Paginator(objects, size)
    try:
        objects = paginator.page(page)
    except PageNotAnInteger:
        objects = paginator.page(1)
        page = 1
    except EmptyPage:
        # Out-of-range pages clamp to the last page.
        objects = paginator.page(paginator.num_pages)
        page = paginator.num_pages

    for obj in objects:
        obj['url'] = reverse(detail, args=[obj['pk']])
        obj['modified_date_sort'] = obj['modified_date'].strftime('%Y-%m-%d')
        obj['modified_date'] = obj['modified_date'].strftime(
            '%Y-%m-%d %H:%M:%S')

    return JsonResponse({
        'state': 1,
        'message': 'OK',
        'data': {
            'objects': list(objects),
            'page': page,
            'max_page': paginator.num_pages,
            'size': size
        }
    })
Ejemplo n.º 24
0
 def refers_qs(self, queryset, name, value):
     """Filter rows whose referenced invoice number equals ``value``.

     The comparison is case-insensitive against the concatenation of the
     referenced invoice's prefix and number.
     """
     annotated = queryset.annotate(
         refers_nr=Concat('refers__prefix', 'refers__invoice_no'))
     return annotated.filter(refers_nr__iexact=value)
Ejemplo n.º 25
0
 def get_queryset(self):
     """Annotate every row with a "name(username)" display string."""
     qs = super().get_queryset()
     display = Concat(
         F('user__name'), Value('('), F('user__username'), Value(')'))
     return qs.annotate(user_display=display)
Ejemplo n.º 26
0
 def get_queryset(self):
     """Event invoices with lines prefetched and the full number annotated."""
     invoices = self.request.event.invoices
     invoices = invoices.prefetch_related('lines')
     invoices = invoices.select_related('order', 'refers')
     # ``nr`` is the human-facing invoice number: prefix + running number.
     return invoices.annotate(nr=Concat('prefix', 'invoice_no'))
Ejemplo n.º 27
0
 def filter_full_name(self, queryset, field_name, value):
     """Filter by a "<group slug>/<slug>" prefix; no-op for empty values."""
     if not value:
         return queryset
     full_name = Concat(F('group__slug'), V('/'), F('slug'),
                        output_field=CharField())
     return queryset.annotate(fullname=full_name).filter(
         fullname__startswith=value)
Ejemplo n.º 28
0
def busqueda(request):
    """Render book search results filtered by free text and/or category.

    Reads ``q`` (search text) and ``dropdown1`` (category name) from the
    query string.  Redirects to the index when both are empty.
    """

    def _text_filter(query):
        # Match the title, the author (first, last or full name) or keywords.
        return (Q(titulo__icontains=query)
                | Q(autor__nombre__icontains=query)
                | Q(autor__apellido__icontains=query)
                | Q(nombrecompleto__icontains=query)
                | Q(palabras_claves__icontains=query))

    def _annotated_books():
        # The full author name must be annotated so a "first last" query
        # can match it as a single string.
        return Libro.objects.annotate(nombrecompleto=Concat(
            'autor__nombre', Value(' '), 'autor__apellido'))

    nombrearea = Categoria.objects.all()
    query = request.GET.get("q")
    nombrear = request.GET.get("dropdown1")
    admin = str(request.user) == 'admin'

    if nombrear != "" and query == "":
        # Category-only browse.
        if nombrear == "Todas las categorias":
            libros = Libro.objects.all().order_by('-fecha_registro')
        else:
            libros = Libro.objects.filter(
                Q(categoria__nombre__iexact=nombrear))
        nombrearea = Categoria.objects.exclude(nombre=nombrear)
    elif query != "" and nombrear == "":
        # Text-only search across every category.  (No extra truthiness
        # guard here: the old ``if query:`` could leave ``libros`` unbound.)
        libros = _annotated_books().filter(_text_filter(query))
    elif query != "" and nombrear != "":
        # Text search scoped to a category.
        if nombrear == "Todas las areas":
            libros = _annotated_books().filter(_text_filter(query))
        else:
            libros = _annotated_books().filter(
                Q(categoria__nombre__iexact=nombrear) & _text_filter(query))
        nombrearea = Categoria.objects.exclude(nombre=nombrear)
    else:
        # No search term and no category: back to the index page.  This
        # branch also catches odd inputs so the view never returns None.
        return HttpResponseRedirect(reverse('index'))

    return render(request, 'busqueda.html', {
        'libros': libros,
        'nombrearea': nombrearea,
        'user': admin,
    })
Ejemplo n.º 29
0
 def _rewrite_node_path(cls, old_path, new_path):
     """Re-root every stored path under ``old_path`` onto ``new_path``."""
     # Keep everything after the old prefix (Substr is 1-indexed) and glue
     # the new prefix in front of it, all in a single UPDATE.
     tail = Substr('path', len(old_path) + 1)
     cls.objects.filter(path__startswith=old_path).update(
         path=Concat(Value(new_path), tail))
Ejemplo n.º 30
0
    def RunGameProperties(p_season, p_round, p_homeClub):
        """Build line-up and in-game event data for both clubs of one game.

        Returns a ``CU.HttpReturn`` (status 201) whose ``results`` holds a
        ``{'home': ..., 'away': ...}`` structure, or the string
        "No game available." when the game does not exist.  The previously
        copy-pasted home/away query blocks are factored into local helpers.
        """

        try:
            game_m = FT.Game.objects.get(SeasonFK__Season=p_season,
                                         Round=p_round,
                                         ClubHomeFK__Club=p_homeClub)
        except FT.Game.DoesNotExist:
            hret = CU.HttpReturn()
            hret.results = "No game available."
            hret.status = 201
            return hret

        def _named_events(club):
            # All events of one club in this game, annotated with the
            # player's full name.
            return FT.GameEvent.objects.annotate(FirstLast=Concat(
                'PlayerFK__FirstName', Value(' '),
                'PlayerFK__LastName')).filter(GameFK=game_m,
                                              EventClubFK__Club=club)

        def _lineup(club):
            # Starting line-up grouped by position, players sorted by name.
            events = _named_events(club).values(
                'EventFK__Event', 'FirstLast', 'Position').filter(
                    EventFK__Event__in=['Player in Line-up'])
            lineup = OrderedDict([('Goalkeeper', []), ('Defender', []),
                                  ('Midfield', []), ('Forward', [])])
            for evt in events:
                lineup[evt['Position']].append(evt['FirstLast'])
            for pos in lineup:
                lineup[pos] = sorted(lineup[pos])
            return lineup

        def _moves(club):
            # Move-type events plus substitutions, ordered by game time
            # first and event name second (two stable sorts).
            events = _named_events(club).values(
                'FirstLast', 'EventFK__Event', 'EventTime').filter(
                    Q(EventFK__Type='Move')
                    | Q(EventFK__Event__in=['Player Sub-out',
                                            'Player Sub-in']))
            moves = []
            for evt in events:
                move = OrderedDict()
                move['player'] = evt['FirstLast']
                move['event'] = evt['EventFK__Event']
                move['game_time'] = evt['EventTime'].strftime("%H:%M")
                moves.append(move)
            moves.sort(key=lambda x: x['event'])
            moves.sort(key=lambda x: x['game_time'])
            return moves

        home_dt = {
            'name': p_homeClub,
            'goals': game_m.GoalsHome if game_m.GoalsHome is not None else '*',
            'lineup': _lineup(p_homeClub),
            'moves': _moves(game_m.ClubHomeFK.Club),
        }

        away_dt = {
            'name': game_m.ClubAwayFK.Club,
            'goals': game_m.GoalsAway if game_m.GoalsAway is not None else '*',
            'lineup': _lineup(game_m.ClubAwayFK.Club),
            'moves': _moves(game_m.ClubAwayFK.Club),
        }

        clubs = {
            'home': home_dt,
            'away': away_dt,
        }

        hret = CU.HttpReturn()
        hret.results = clubs
        hret.status = 201
        return hret