Ejemplo n.º 1
0
 def aggregate_words(self):
     """Return an aggregation spec mapping each word-count field to a
     ``Coalesce(Sum(field), 0)`` expression, so missing rows count as 0."""
     return {
         field: Coalesce(Sum(field), 0)
         for field in ("total_words", "fuzzy_words", "translated_words")
     }
Ejemplo n.º 2
0
def nav_context_list(request):
    """JSON endpoint feeding the navigation context-switcher search box.

    Searches events, organizers, orders and vouchers the requesting user is
    allowed to see, filtered by the ``query`` GET parameter, and returns a
    select2-style payload ``{"results": [...], "pagination": {"more": ...}}``.
    """
    query = request.GET.get('query', '').strip()
    organizer = request.GET.get('organizer', None)

    # Tolerate a non-numeric page parameter by falling back to page 1.
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    # Events the user has any permission on, matched on (localized) name or
    # slug, ordered by the earliest relevant date: the earliest subevent
    # start if subevents exist, otherwise the event's own date_from.
    qs_events = request.user.get_events_with_any_permission(request).filter(
        Q(name__icontains=i18ncomp(query)) | Q(slug__icontains=query)
    ).annotate(
        min_from=Min('subevents__date_from'),
        max_from=Max('subevents__date_from'),
        max_to=Max('subevents__date_to'),
        max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from'))
    ).annotate(
        order_from=Coalesce('min_from', 'date_from'),
    ).order_by('-order_from')

    # Active staff sessions may switch to any organizer; regular users only
    # to organizers of teams they are a member of.
    if request.user.has_active_staff_session(request.session.session_key):
        qs_orga = Organizer.objects.all()
    else:
        qs_orga = Organizer.objects.filter(pk__in=request.user.teams.values_list('organizer', flat=True))
    if query:
        qs_orga = qs_orga.filter(Q(name__icontains=query) | Q(slug__icontains=query))

    # Orders and vouchers are only searched when a query is present.  For
    # non-staff users, results are limited to events covered by a team that
    # grants the respective view permission (either organizer-wide teams or
    # teams limited to specific events).
    if query:
        qs_orders = Order.objects.filter(
            code__icontains=query
        ).select_related('event', 'event__organizer').only('event', 'code', 'pk').order_by()
        if not request.user.has_active_staff_session(request.session.session_key):
            qs_orders = qs_orders.filter(
                Q(event__organizer_id__in=request.user.teams.filter(
                    all_events=True, can_view_orders=True).values_list('organizer', flat=True))
                | Q(event_id__in=request.user.teams.filter(
                    can_view_orders=True).values_list('limit_events__id', flat=True))
            )

        qs_vouchers = Voucher.objects.filter(
            code__icontains=query
        ).select_related('event', 'event__organizer').only('event', 'code', 'pk').order_by()
        if not request.user.has_active_staff_session(request.session.session_key):
            qs_vouchers = qs_vouchers.filter(
                Q(event__organizer_id__in=request.user.teams.filter(
                    all_events=True, can_view_vouchers=True).values_list('organizer', flat=True))
                | Q(event_id__in=request.user.teams.filter(
                    can_view_vouchers=True).values_list('limit_events__id', flat=True))
            )
    else:
        qs_vouchers = Voucher.objects.none()
        qs_orders = Order.objects.none()

    # Offer the user's own account entry when there is no query, or when the
    # query matches their email address or full name.
    show_user = not query or (
        query and request.user.email and query.lower() in request.user.email.lower()
    ) or (
        query and request.user.fullname and query.lower() in request.user.fullname.lower()
    )
    # NOTE(review): the pagination total only counts events and organizers;
    # order/voucher matches are not included, so "more" can be inaccurate
    # when those dominate — confirm this is intended.
    total = qs_events.count() + qs_orga.count()
    pagesize = 20
    offset = (page - 1) * pagesize
    # Without a query, only a short preview (5 entries per category) is
    # shown; with a query, each category is paged with the full page size.
    results = ([
        serialize_user(request.user)
    ] if show_user else []) + [
        serialize_orga(e) for e in qs_orga[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_event(e) for e in qs_events.select_related('organizer')[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_order(e) for e in qs_orders[offset:offset + (pagesize if query else 5)]
    ] + [
        serialize_voucher(e) for e in qs_vouchers[offset:offset + (pagesize if query else 5)]
    ]

    # If a current organizer context was passed and the user may access it,
    # pin that organizer right below the user entry (de-duplicating first).
    if show_user and organizer:
        try:
            organizer = Organizer.objects.get(pk=organizer)
        except Organizer.DoesNotExist:
            pass
        else:
            if request.user.has_organizer_permission(organizer, request=request):
                organizer = serialize_orga(organizer)
                if organizer in results:
                    results.remove(organizer)
                results.insert(1, organizer)

    doc = {
        'results': results,
        'pagination': {
            "more": total >= (offset + pagesize)
        }
    }
    return JsonResponse(doc)
Ejemplo n.º 3
0
def punto_venta_cerrar(
    usuario_pv_id: int,
    entrega_efectivo_dict: dict,
    operaciones_caja_dict,
    entrega_base_dict: dict,
    valor_tarjeta: float,
    nro_vauchers: int,
    valor_dolares: float,
    tasa_dolar: float,
) -> [PuntoVenta, ArqueoCaja]:
    """Close the open point-of-sale shift of the given user.

    Steps, in order: register any pending cash-box operations, aggregate the
    shift's income/expense transactions to compute the expected cash and card
    totals, create the closing cash count (``ArqueoCaja``), store the
    delivered-cash and next-day-base denominations, register the matching
    egress transactions, and finally mark the point of sale as closed.

    :param usuario_pv_id: id of the ``User`` operating the point of sale.
    :param entrega_efectivo_dict: iterable of denomination dicts
        (``cantidad``, ``valor``, ...) for the cash handed over.
    :param operaciones_caja_dict: iterable of cash-box operations
        (``id`` of the concept, ``valor``) to register before closing.
    :param entrega_base_dict: iterable of denomination dicts for the cash
        base left for the next day.
    :param valor_tarjeta: card total handed over.
    :param nro_vauchers: number of card vouchers handed over.
    :param valor_dolares: US dollars handed over.
    :param tasa_dolar: exchange rate applied to the dollars.
    :raises serializers.ValidationError: if the user has no related
        ``tercero`` or no open point-of-sale shift.
    :returns: the closed ``punto_venta`` and the created ``arqueo``.
    """
    usuario = User.objects.get(pk=usuario_pv_id)
    if hasattr(usuario, 'tercero'):
        tercero = usuario.tercero
        punto_venta_turno = tercero.turno_punto_venta_abierto
        if punto_venta_turno:
            # Imports are local to avoid circular imports between apps.
            from cajas.models import (ArqueoCaja, EfectivoEntregaDenominacion,
                                      BaseDisponibleDenominacion)
            from cajas.services import (
                transaccion_caja_registrar_egreso_entrega_base_cierre_caja,
                transaccion_caja_registrar_egreso_entrega_efectivo_cierre_caja,
                operacion_caja_crear)

            from cajas.models import ConceptoOperacionCaja
            # Register each requested cash-box operation; zero/negative
            # values are silently skipped.
            for operacion_caja in operaciones_caja_dict:
                id_concepto = int(operacion_caja.get('id'))
                concepto = ConceptoOperacionCaja.objects.get(pk=id_concepto)
                valor = float(operacion_caja.get('valor'))

                if valor > 0:
                    operacion_caja_crear(concepto_id=id_concepto,
                                         usuario_pdv_id=usuario_pv_id,
                                         valor=valor,
                                         observacion='Desde cierre de caja')

            # region Transaction totals
            # Split the shift's transactions into expenses ('E') and
            # income ('I') and aggregate cash/card amounts.
            transacciones_egresos = punto_venta_turno.transacciones_caja.filter(
                punto_venta_turno_id=punto_venta_turno.id, tipo='E')
            transacciones_ingresos = punto_venta_turno.transacciones_caja.filter(
                punto_venta_turno_id=punto_venta_turno.id, tipo='I')

            total_ingreso_efectivo = transacciones_ingresos.aggregate(
                total=Coalesce(Sum('valor_efectivo'), 0))['total']

            total_ingreso_tarjeta = transacciones_ingresos.aggregate(
                total=Coalesce(Sum('valor_tarjeta'), 0))['total']

            # NOTE(review): egress amounts appear to be stored as negative
            # values, hence the negation to get positive totals — confirm.
            total_egreso_efectivo = -transacciones_egresos.aggregate(
                total=Coalesce(Sum('valor_efectivo'), 0))['total']

            total_egreso_tarjeta = -transacciones_egresos.aggregate(
                total=Coalesce(Sum('valor_tarjeta'), 0))['total']

            cantidad_ventas_tarjeta = transacciones_ingresos.aggregate(
                cantidad=Coalesce(Sum('nro_vauchers'), 0))['cantidad']

            # Expected amounts to receive at closing = income - expenses.
            total_a_recibir_efectivo = total_ingreso_efectivo - total_egreso_efectivo
            total_a_recibir_tarjeta = total_ingreso_tarjeta - total_egreso_tarjeta

            # endregion
            # Create the closing cash count with expected vs. delivered data.
            arqueo = ArqueoCaja.objects.create(
                punto_venta_turno_id=punto_venta_turno.id,
                valor_pago_efectivo_a_entregar=total_a_recibir_efectivo,
                valor_pago_tarjeta_a_entregar=total_a_recibir_tarjeta,
                nro_voucher_a_entregar=cantidad_ventas_tarjeta,
                dolares_tasa=tasa_dolar,
                valor_dolares_entregados=valor_dolares,
                valor_tarjeta_entregados=valor_tarjeta,
                nro_voucher_entregados=nro_vauchers)

            # Persist the delivered-cash denominations (skip empty rows).
            for denominacion in entrega_efectivo_dict:
                cantidad = int(denominacion.get('cantidad'))
                valor = float(denominacion.get('valor'))
                valor_total = cantidad * valor
                if cantidad > 0:
                    EfectivoEntregaDenominacion.objects.create(
                        arqueo_caja=arqueo,
                        valor_total=valor_total,
                        **denominacion)
            # Persist the next-day cash-base denominations (skip empty rows).
            for denominacion in entrega_base_dict:
                cantidad = int(denominacion.get('cantidad'))
                valor = float(denominacion.get('valor'))
                valor_total = cantidad * valor
                if cantidad > 0:
                    BaseDisponibleDenominacion.objects.create(
                        arqueo_caja=arqueo,
                        valor_total=valor_total,
                        **denominacion)

            # Debug output, emitted only when running under the test runner.
            if 'test' in sys.argv:
                print(
                    '-----------------DATOS ARQUEO CAJA-------------------------'
                )
                print('Arqueo dinero entrega efectivo %s' %
                      arqueo.valor_entrega_efectivo)
                print('Arqueo dinero entrega base efectivo %s' %
                      arqueo.valor_base_dia_siguiente)
                print('Arqueo dinero entrega dolares %s' %
                      arqueo.valor_dolares_en_pesos)
                print('Arqueo dinero entrega efectivo total %s' %
                      arqueo.valor_entrega_efectivo_total)

                print(
                    '-----------------------------------------------------------'
                )
                print('Valor ingresos totales %s' %
                      (total_ingreso_efectivo + total_ingreso_tarjeta))
                print('Valor egresos totales %s' %
                      (total_egreso_efectivo + total_egreso_tarjeta))
                print('Valor ingreso transacciones en efectivo %s' %
                      total_ingreso_efectivo)
                print('Valor ingreso transacciones en tarjeta %s' %
                      total_ingreso_tarjeta)
                print('Valor egresos transacciones en efectivo %s' %
                      total_egreso_efectivo)
                print('Valor egresos transacciones en tarjeta %s' %
                      total_egreso_tarjeta)
                print('Valor a recibir transacciones en efectivo %s' %
                      total_a_recibir_efectivo)

            total_entrega_efectivo = arqueo.valor_entrega_efectivo_total

            # Register the egress transactions for the cash handed over and
            # for the base kept for the next day.
            transaccion_caja_registrar_egreso_entrega_efectivo_cierre_caja(
                punto_venta_turno_id=punto_venta_turno.id,
                valor_efectivo=total_entrega_efectivo)
            transaccion_caja_registrar_egreso_entrega_base_cierre_caja(
                punto_venta_turno_id=punto_venta_turno.id,
                valor_efectivo=arqueo.valor_base_dia_siguiente)

            total_entrega_tarjeta = arqueo.valor_tarjeta_entregados

            # Shortfall/surplus between expected and delivered amounts.
            descuadre_efectivo = total_a_recibir_efectivo - total_entrega_efectivo
            descuadre_tarjeta = total_a_recibir_tarjeta - total_entrega_tarjeta

            if 'test' in sys.argv:
                print(
                    '-----------------------------------------------------------'
                )
                print('Descuadre por efectivo %s' % descuadre_efectivo)
                print('Descuadre por tarjeta %s' % descuadre_tarjeta)

            # Close the point of sale and finish the shift.
            punto_venta = punto_venta_turno.punto_venta
            punto_venta.abierto = False
            punto_venta.usuario_actual = None
            punto_venta.save()
            punto_venta_turno.finish = timezone.now()
            punto_venta_turno.saldo_cierre_caja = arqueo.diferencia
            punto_venta_turno.save()
            if 'test' in sys.argv:
                print('el saldo es %s' %
                      punto_venta_turno.diferencia_cierre_caja)
        else:
            raise serializers.ValidationError({
                '_error':
                'Este tercero no posee ningún punto de venta abierto actualmente'
            })
    else:
        raise serializers.ValidationError({
            '_error':
            'El usuario no tiene un tercero relacionado, por ende, no tiene ningún punto de venta que cerrar'
        })
    return punto_venta, arqueo
Ejemplo n.º 4
0
class CotizacionComponenteViewSet(viewsets.ModelViewSet):
    """CRUD and workflow endpoints for component quotations.

    The base queryset eagerly loads the relations rendered by the
    serializers and annotates each quotation with the sum of its items'
    totals (``valor_total``).  Detail-oriented actions swap the serializer
    to ``CotizacionComponenteConDetalleSerializer`` before responding.
    """
    permission_classes = [permissions.IsAuthenticated]
    # NOTE(review): 'creado_por' is listed twice in select_related —
    # harmless but redundant.
    queryset = CotizacionComponente.objects.select_related(
        'responsable',
        'creado_por',
        'creado_por',
        'cliente',
        'cliente__colaborador_componentes',
        'cliente__canal',
        'ciudad',
        'contacto',
        'ciudad__departamento',
        'ciudad__departamento__pais',
    ).prefetch_related(
        'items',
        'items__forma_pago',
        'items__forma_pago__canal',
        'adjuntos',
        'versiones',
        'seguimientos',
        'envios_emails',
        'envios_emails__creado_por',
        'envios_emails__archivo',
        'seguimientos__creado_por',
        'seguimientos__documento_cotizacion',
    ).annotate(valor_total=Coalesce(Sum('items__valor_total'), 0)).all()
    serializer_class = CotizacionComponenteSerializer

    def perform_create(self, serializer):
        """Stamp the requesting user as the quotation's creator."""
        serializer.save(creado_por=self.request.user)

    def retrieve(self, request, *args, **kwargs):
        """Return a single quotation using the detail serializer."""
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        return super().retrieve(request, *args, **kwargs)

    def update(self, request, *args, **kwargs):
        """Update a quotation, responding with the detail serializer."""
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        return super().update(request, *args, **kwargs)

    def list(self, request, *args, **kwargs):
        """List quotations, routing the query to the read replica."""
        self.queryset = self.queryset.using('read_only')
        return super().list(request, *args, **kwargs)

    def destroy(self, request, *args, **kwargs):
        """Delete a quotation only while it is still a draft.

        Only quotations in state 'INI' without an assigned consecutive
        number can be removed; their items are deleted first.
        """
        cotizacion = self.get_object()
        if cotizacion.estado == 'INI' and (cotizacion.nro_consecutivo is None):
            cotizacion.items.all().delete()
            return super().destroy(request, *args, **kwargs)
        raise ValidationError(
            {'_error': 'Imposible eliminar cotización, ya ha sido enviada'})

    @action(detail=True, methods=['post'])
    def relacionar_factura(self, request, pk=None):
        """Link (or unlink, per 'accion') an invoice to this quotation."""
        from .services import relacionar_cotizacion_con_factura
        factura_id = request.POST.get('factura_id')
        accion = request.POST.get('accion')
        relacionar_cotizacion_con_factura(cotizacion_componente_id=pk,
                                          factura_id=factura_id,
                                          accion=accion)
        serializer = self.get_serializer(self.get_object())
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def asignar_consecutivo(self, request, pk=None):
        """Assign the next consecutive number to this quotation."""
        from .services import cotizacion_componentes_asignar_nro_consecutivo
        cotizacion_componente = cotizacion_componentes_asignar_nro_consecutivo(
            cotizacion_componente=self.get_object())
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)

    @action(detail=False, http_method_names=[
        'get',
    ])
    def cotizaciones_por_ano_mes(self, request):
        """List quotations for given years/months (comma-separated GET
        params), plus all sent/received ones regardless of date.

        NOTE(review): raises AttributeError if 'months' or 'years' is
        missing from the query string — confirm callers always send both.
        """
        months = self.request.GET.get('months').split(',')
        years = self.request.GET.get('years').split(',')
        lista = self.queryset.filter((Q(orden_compra_fecha__year__in=years)
                                      & Q(orden_compra_fecha__month__in=months)
                                      & Q(estado__in=['PRO', 'FIN']))
                                     | Q(estado__in=['ENV', 'REC']))
        serializer = self.get_serializer(lista, many=True)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def cambiar_estado(self, request, pk=None):
        """Change the quotation's state, recording purchase-order data and
        an optional rejection reason / next follow-up date."""
        from .services import cotizacion_componentes_cambiar_estado
        orden_compra_fecha = request.POST.get('orden_compra_fecha', None)
        orden_compra_nro = request.POST.get('orden_compra_nro', None)
        orden_compra_valor = float(request.POST.get('orden_compra_valor', 0))
        nuevo_estado = request.POST.get('nuevo_estado', None)
        razon_rechazo = request.POST.get('razon_rechazo', None)
        fecha_verificacion_proximo_seguimiento = request.POST.get(
            'fecha_verificacion_proximo_seguimiento', None)
        cotizacion_componentes_cambiar_estado(
            cotizacion_componente_id=pk,
            nuevo_estado=nuevo_estado,
            razon_rechazo=razon_rechazo,
            usuario=self.request.user,
            fecha_verificacion_proximo_seguimiento=
            fecha_verificacion_proximo_seguimiento,
            orden_compra_nro=orden_compra_nro,
            orden_compra_valor=orden_compra_valor,
            orden_compra_fecha=orden_compra_fecha)
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(self.get_object())
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def cambiar_fecha_proximo_seguimiento_lista(self, request, pk=None):
        """Update the next follow-up date and its description."""
        from .services import cotizacion_componentes_cambiar_fecha_proximo_seguimiento
        fecha_verificacion_proximo_seguimiento = request.POST.get(
            'fecha_verificacion_proximo_seguimiento', None)
        fecha_proximo_seguimiento_descripcion = request.POST.get(
            'fecha_proximo_seguimiento_descripcion', None)

        cotizacion_componentes_cambiar_fecha_proximo_seguimiento(
            cotizacion_componente_id=pk,
            usuario=self.request.user,
            fecha_verificacion_proximo_seguimiento=
            fecha_verificacion_proximo_seguimiento,
            fecha_proximo_seguimiento_descripcion=
            fecha_proximo_seguimiento_descripcion)
        serializer = self.get_serializer(self.get_object())
        return Response(serializer.data)

    # TODO: improve this method — it issues many queries when sending
    @action(detail=True, methods=['post'])
    def enviar(self, request, pk=None):
        """Send the quotation by email to up to five recipients (four
        explicit addresses plus the advisor), optionally suppressing the
        actual send via 'no_enviar'."""
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        from .services import cotizacion_componentes_enviar
        email_uno = request.POST.get('email_uno', None)
        email_dos = request.POST.get('email_dos', None)
        email_tres = request.POST.get('email_tres', None)
        email_cuatro = request.POST.get('email_cuatro', None)
        email_asesor = request.POST.get('email_asesor', None)
        no_enviar = request.POST.get('no_enviar', None)
        fecha_verificacion_proximo_seguimiento = request.POST.get(
            'fecha_verificacion_proximo_seguimiento', None)

        # Collect only the recipient fields that were actually provided.
        emails_destino = []
        if email_uno:
            emails_destino.append(email_uno)
        if email_dos:
            emails_destino.append(email_dos)
        if email_tres:
            emails_destino.append(email_tres)
        if email_cuatro:
            emails_destino.append(email_cuatro)
        if email_asesor:
            emails_destino.append(email_asesor)

        cotizacion_componentes_enviar(
            cotizacion_componente=self.queryset.get(pk=pk),
            no_enviar=no_enviar,
            request=request,
            emails_destino=emails_destino,
            fecha_verificacion_proximo_seguimiento=
            fecha_verificacion_proximo_seguimiento)
        serializer = self.get_serializer(self.get_object())
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def imprimir(self, request, pk=None):
        """Render the quotation as a PDF and return it as an attachment."""
        from .services import cotizacion_componentes_generar_pdf
        response = HttpResponse(content_type='application/pdf')
        output = cotizacion_componentes_generar_pdf(
            cotizacion_componente=self.queryset.get(pk=pk), request=request)
        response.write(output.getvalue())
        output.close()
        response[
            'Content-Disposition'] = 'attachment; filename="somefilename.pdf"'
        response['Content-Transfer-Encoding'] = 'binary'
        return response

    @action(detail=True, methods=['post'])
    def adicionar_seguimiento(self, request, pk=None):
        """Add a follow-up entry to the quotation."""
        cotizacion_componente = self.get_object()
        tipo_seguimiento = request.POST.get('tipo_seguimiento')
        descripcion = request.POST.get('descripcion')
        fecha = request.POST.get('fecha', None)
        fecha_verificacion_proximo_seguimiento = request.POST.get(
            'fecha_verificacion_proximo_seguimiento', None)
        from .services import cotizacion_componentes_add_seguimiento
        cotizacion_componentes_add_seguimiento(
            cotizacion_componente_id=cotizacion_componente.id,
            tipo_seguimiento=tipo_seguimiento,
            descripcion=descripcion,
            creado_por=self.request.user,
            fecha=fecha,
            fecha_verificacion_proximo_seguimiento=
            fecha_verificacion_proximo_seguimiento)
        # Re-read so the response reflects the service's changes.
        cotizacion_componente.refresh_from_db()
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def eliminar_seguimiento(self, request, pk=None):
        """Remove a follow-up entry from the quotation."""
        cotizacion_componente = self.get_object()
        seguimiento_id = request.POST.get('seguimiento_id')
        from .services import cotizacion_componentes_delete_seguimiento
        cotizacion_componentes_delete_seguimiento(
            cotizacion_componente_id=cotizacion_componente.id,
            cotizacion_componente_seguimiento_id=seguimiento_id,
            eliminado_por=self.request.user)
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def adicionar_item(self, request, pk=None):
        """Add an item (product/service line) to the quotation."""
        cotizacion = self.get_object()
        tipo_item = request.POST.get('tipo_item')
        precio_unitario = request.POST.get('precio_unitario')
        item_descripcion = request.POST.get('item_descripcion')
        item_referencia = request.POST.get('item_referencia')
        item_unidad_medida = request.POST.get('item_unidad_medida')
        tasa = request.POST.get('tasa', 0)
        moneda_origen = request.POST.get('moneda_origen')
        moneda_origen_costo = request.POST.get('moneda_origen_costo', 0)
        id_item = request.POST.get('id_item', None)
        forma_pago_id = request.POST.get('forma_pago_id', None)
        tipo_transporte = request.POST.get('tipo_transporte', None)
        from .services import cotizacion_componentes_adicionar_item
        cotizacion_componente = cotizacion_componentes_adicionar_item(
            tipo_item=tipo_item,
            cotizacion_componente_id=cotizacion.id,
            precio_unitario=precio_unitario,
            id_item=id_item,
            item_descripcion=item_descripcion,
            item_referencia=item_referencia,
            item_unidad_medida=item_unidad_medida,
            forma_pago_id=forma_pago_id,
            tipo_transporte=tipo_transporte,
            tasa=tasa,
            moneda_origen=moneda_origen,
            moneda_origen_costo=moneda_origen_costo)
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def eliminar_item(self, request, pk=None):
        """Remove an item from the quotation."""
        cotizacion = self.get_object()
        id_item_cotizacion = request.POST.get('id_item_cotizacion')
        from .services import cotizacion_componentes_item_eliminar
        cotizacion_componentes_item_eliminar(
            item_componente_id=id_item_cotizacion)
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(cotizacion)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def cambiar_posicion_item(self, request, pk=None):
        """Swap the display positions of two items of the quotation."""
        from .services import cotizacion_componentes_item_cambiar_posicion
        # print(request.META.get('HTTP_X_FORWARDED_FOR'))
        # print(request.META.get('REMOTE_ADDR'))
        cotizacion_componente = self.get_object()
        item_uno_id = request.POST.get('item_uno_id')
        item_dos_id = request.POST.get('item_dos_id')
        cotizacion_componentes_item_cambiar_posicion(
            cotizacion_componente_id=cotizacion_componente.id,
            item_uno_id=item_uno_id,
            item_dos_id=item_dos_id)
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)

    @action(detail=False, http_method_names=[
        'get',
    ])
    def cotizaciones_por_estado(self, request):
        """List quotations by state ('TOD' = all; otherwise the given state
        plus drafts), limited to the user's own unless they are superuser or
        hold the list-all-vendors permission."""
        estado = self.request.GET.get('estado')
        if estado == 'TOD':
            lista = self.queryset
        else:
            lista = self.queryset.filter(Q(estado=estado) | Q(estado='INI'))
        user = self.request.user

        ver_todas = request.user.has_perm(
            'cotizaciones_componentes.list_todos_vendedores_cotizacioncomponente'
        )

        if not (user.is_superuser or ver_todas):
            # Own = created by the user with no responsible assigned, or
            # where the user is the responsible.
            lista = lista.filter(
                (Q(responsable__isnull=True) & Q(creado_por=user))
                | (Q(responsable__isnull=False) & Q(responsable=user)))

        serializer = self.get_serializer(lista, many=True)
        return Response(serializer.data)

    @action(detail=False, http_method_names=[
        'get',
    ])
    def cotizaciones_por_cliente(self, request):
        """List a client's quotations with full detail."""
        cliente_id = self.request.GET.get('cliente_id')
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        lista = self.queryset.filter(cliente_id=cliente_id)
        serializer = self.get_serializer(lista, many=True)
        return Response(serializer.data)

    @action(detail=False, http_method_names=[
        'get',
    ])
    def cotizaciones_tuberia_ventas(self, request):
        """List active-pipeline quotations (excluding draft, deleted and
        finished states) from the read replica."""
        self.serializer_class = CotizacionComponenteTuberiaVentasSerializer
        lista = CotizacionComponente.objects.using(
            'read_only').prefetch_related(
                'responsable',
                'responsable__mi_colaborador',
                'creado_por',
                'cliente',
            ).exclude(estado__in=['INI', 'ELI', 'FIN'])
        serializer = self.get_serializer(lista, many=True)
        return Response(serializer.data)

    @action(detail=False, http_method_names=[
        'get',
    ])
    def cotizaciones_por_cliente_para_relacionar_factura(self, request):
        """List a client's quotations eligible for invoice linking,
        restricted to the user's own unless they are superuser."""
        cliente_id = self.request.GET.get('cliente_id')
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        lista = self.queryset.using('read_only').filter(
            Q(cliente_id=cliente_id)
            & Q(estado__in=['PRO', 'FIN', 'ENV', 'REC']))
        user = self.request.user
        if not user.is_superuser:
            lista = lista.filter(
                (Q(responsable__isnull=True) & Q(creado_por=user))
                | (Q(responsable__isnull=False) & Q(responsable=user)))
        serializer = self.get_serializer(lista, many=True)
        return Response(serializer.data)

    @action(detail=False, http_method_names=[
        'get',
    ])
    def cotizaciones_en_edicion_asesor(self, request):
        """List draft quotations being edited; non-superusers only see
        their own.

        NOTE(review): the non-superuser branch rebuilds from self.queryset
        and thus drops the 'read_only' routing — confirm intended.
        """
        user = self.request.user
        self.serializer_class = CotizacionComponenteConDetalleSerializer
        lista = self.queryset.using('read_only').filter(estado='INI')
        if not user.is_superuser:
            lista = self.queryset.filter(estado='INI').filter(
                (Q(responsable__isnull=True) & Q(creado_por=user))
                | (Q(responsable__isnull=False) & Q(responsable=user)))
        serializer = self.get_serializer(lista, many=True)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def upload_archivo(self, request, pk=None):
        """Attach an uploaded file or image to the quotation."""
        cotizacion_componente = self.get_object()
        nombre_archivo = self.request.POST.get('nombre')
        archivo = self.request.FILES['archivo']
        tipo = self.request.POST.get('tipo')
        adjunto_cotizacion = CotizacionComponenteAdjunto()
        if tipo == 'imagen':
            adjunto_cotizacion.imagen = archivo
        elif tipo == 'archivo':
            adjunto_cotizacion.adjunto = archivo
        adjunto_cotizacion.cotizacion_componente = cotizacion_componente
        adjunto_cotizacion.nombre_adjunto = nombre_archivo
        adjunto_cotizacion.creado_por = self.request.user
        adjunto_cotizacion.save()
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def delete_archivo(self, request, pk=None):
        """Delete an attachment from the quotation by its id."""
        cotizacion_componente = self.get_object()
        adjunto_id = self.request.POST.get('adjunto_id')
        adjunto = CotizacionComponenteAdjunto.objects.get(pk=adjunto_id)
        adjunto.delete()
        serializer = self.get_serializer(cotizacion_componente)
        return Response(serializer.data)
Ejemplo n.º 5
0
    def get(self, request, organization):
        """
        List an Organization's Releases
        ```````````````````````````````
        Return a list of releases for a given organization.

        :pparam string organization_slug: the organization short name
        :qparam string query: this parameter can be used to create a
                              "starts with" filter for the version.
        """
        query = request.GET.get("query")
        with_health = request.GET.get("health") == "1"
        status_filter = request.GET.get("status", "open")
        flatten = request.GET.get("flatten") == "1"
        sort = request.GET.get("sort") or "date"
        health_stat = request.GET.get("healthStat") or "sessions"
        summary_stats_period = request.GET.get("summaryStatsPeriod") or "14d"
        health_stats_period = request.GET.get("healthStatsPeriod") or (
            "24h" if with_health else "")
        if summary_stats_period not in STATS_PERIODS:
            raise ParseError(detail=get_stats_period_detail(
                "summaryStatsPeriod", STATS_PERIODS))
        if health_stats_period and health_stats_period not in STATS_PERIODS:
            raise ParseError(detail=get_stats_period_detail(
                "healthStatsPeriod", STATS_PERIODS))
        if health_stat not in ("sessions", "users"):
            raise ParseError(detail="invalid healthStat")

        paginator_cls = OffsetPaginator
        paginator_kwargs = {}

        try:
            filter_params = self.get_filter_params(request,
                                                   organization,
                                                   date_filter_optional=True)
        except NoProjects:
            return Response([])

        # This should get us all the projects into postgres that have received
        # health data in the last 24 hours.  If health data is not requested
        # we don't upsert releases.
        if with_health:
            debounce_update_release_health_data(organization,
                                                filter_params["project_id"])

        queryset = Release.objects.filter(organization=organization)

        if status_filter:
            try:
                status_int = ReleaseStatus.from_string(status_filter)
            except ValueError:
                raise ParseError(detail="invalid value for status")

            if status_int == ReleaseStatus.OPEN:
                queryset = queryset.filter(
                    Q(status=status_int) | Q(status=None))
            else:
                queryset = queryset.filter(status=status_int)

        queryset = queryset.select_related("owner").annotate(date=Coalesce(
            "date_released", "date_added"), )

        queryset = add_environment_to_queryset(queryset, filter_params)

        if query:
            query_q = Q(version__icontains=query)

            suffix_match = _release_suffix.match(query)
            if suffix_match is not None:
                query_q |= Q(version__icontains="%s+%s" %
                             suffix_match.groups())

            queryset = queryset.filter(query_q)

        select_extra = {}

        queryset = queryset.distinct()
        if flatten:
            select_extra[
                "_for_project_id"] = "sentry_release_project.project_id"

        if sort == "date":
            queryset = queryset.filter(
                projects__id__in=filter_params["project_id"]).order_by("-date")
            paginator_kwargs["order_by"] = "-date"
        elif sort in (
                "crash_free_sessions",
                "crash_free_users",
                "sessions",
                "users",
                "sessions_24h",
                "users_24h",
        ):
            if not flatten:
                return Response(
                    {
                        "detail":
                        "sorting by crash statistics requires flattening (flatten=1)"
                    },
                    status=400,
                )
            paginator_cls = MergingOffsetPaginator
            paginator_kwargs.update(
                data_load_func=lambda offset, limit:
                get_project_releases_by_stability(
                    project_ids=filter_params["project_id"],
                    environments=filter_params.get("environment"),
                    scope=sort,
                    offset=offset,
                    stats_period=summary_stats_period,
                    limit=limit,
                ),
                apply_to_queryset=lambda queryset, rows: queryset.filter(
                    projects__id__in=list(x[0] for x in rows),
                    version__in=list(x[1] for x in rows)),
                key_from_model=lambda x: (x._for_project_id, x.version),
            )
        else:
            return Response({"detail": "invalid sort"}, status=400)

        queryset = queryset.extra(select=select_extra)
        queryset = add_date_filter_to_queryset(queryset, filter_params)

        return self.paginate(
            request=request,
            queryset=queryset,
            paginator_cls=paginator_cls,
            on_results=lambda x: serialize(
                x,
                request.user,
                with_health_data=with_health,
                health_stat=health_stat,
                health_stats_period=health_stats_period,
                summary_stats_period=summary_stats_period,
                environments=filter_params.get("environment") or None,
            ),
            **paginator_kwargs)
Ejemplo n.º 6
0
 def __init__(self, provider, report_type):
     """Build the Azure provider mapping and delegate to the base class.

     Args:
         provider (str): The provider this mapper is being built for.
         report_type (str): The report type the caller wants to run.

     The single entry in ``self._mapping`` describes, for the Azure
     provider, how API query parameters translate into ORM filters,
     group-by options, aggregates and annotations for each supported
     report type (``costs``, ``instance_type``, ``storage``, ``tags``).
     """
     self._mapping = [
         {
             'provider':
             Provider.PROVIDER_AZURE,
             'alias':
             'subscription_guid',  # FIXME: probably wrong
             'annotations': {},
             'end_date':
             'costentrybill__billing_period_end',
             # Filterable dimensions: each maps a query parameter onto an
             # ORM field lookup (all case-insensitive substring matches).
             'filters': {
                 'subscription_guid': [
                     {
                         'field': 'subscription_guid',
                         'operation': 'icontains',
                         'composition_key': 'account_filter'
                     },
                 ],
                 'service_name': {
                     'field': 'service_name',
                     'operation': 'icontains'
                 },
                 'resource_location': {
                     'field': 'resource_location',
                     'operation': 'icontains'
                 },
                 'instance_type': {
                     'field': 'instance_type',
                     'operation': 'icontains'
                 }
             },
             'group_by_options': [
                 'service_name', 'subscription_guid', 'resource_location',
                 'instance_type'
             ],
             'tag_column':
             'tags',
             # Per-report-type query definitions.  Throughout:
             # cost = pretax_cost + markup_cost with NULLs coalesced to 0,
             # and derived_cost is a constant 0 for Azure.
             'report_type': {
                 'costs': {
                     'aggregates': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Sum(Value(0, output_field=DecimalField())),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                     },
                     'aggregate_key':
                     'pretax_cost',
                     'annotations': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Value(0, output_field=DecimalField()),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         # Currency of the grouped rows; 'USD' when NULL.
                         'cost_units':
                         Coalesce(Max('currency'), Value('USD'))
                     },
                     'delta_key': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         )
                     },
                     'filter': [{}],
                     'cost_units_key':
                     'currency',
                     'cost_units_fallback':
                     'USD',
                     'sum_columns': [
                         'cost', 'infrastructure_cost', 'derived_cost',
                         'markup_cost'
                     ],
                     'default_ordering': {
                         'cost': 'desc'
                     },
                 },
                 # Instance-type report: restricted to rows with a non-null
                 # instance_type; usage_quantity is summed and count is the
                 # max instance_count per group.
                 'instance_type': {
                     'aggregates': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Sum(Value(0, output_field=DecimalField())),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'count':
                         Sum(Value(0, output_field=DecimalField())),
                         'usage':
                         Sum('usage_quantity'),
                     },
                     'aggregate_key':
                     'usage_quantity',
                     'annotations': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Value(0, output_field=DecimalField()),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'cost_units':
                         Coalesce(Max('currency'), Value('USD')),
                         'count':
                         Max('instance_count'),
                         'count_units':
                         Value('instance_types', output_field=CharField()),
                         'usage':
                         Sum('usage_quantity'),
                         # FIXME: Waiting on MSFT for usage_units default
                         'usage_units':
                         Coalesce(Max('unit_of_measure'),
                                  Value('Instance Type Placeholder'))
                     },
                     'delta_key': {
                         'usage': Sum('usage_quantity')
                     },
                     'filter': [{
                         'field': 'instance_type',
                         'operation': 'isnull',
                         'parameter': False
                     }],
                     'group_by': ['instance_type'],
                     'cost_units_key':
                     'currency',
                     'cost_units_fallback':
                     'USD',
                     'usage_units_key':
                     'unit_of_measure',
                     'usage_units_fallback':
                     'Instance Type Placeholder',  # FIXME: Waiting on MSFT
                     'count_units_fallback':
                     'instances',
                     'sum_columns': [
                         'usage', 'cost', 'infrastructure_cost',
                         'derived_cost', 'markup_cost', 'count'
                     ],
                     'default_ordering': {
                         'usage': 'desc'
                     },
                 },
                 # Storage report: limited to services whose name contains
                 # the substring 'Storage'.
                 'storage': {
                     'aggregates': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'usage':
                         Sum('usage_quantity'),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'derived_cost':
                         Sum(Value(0, output_field=DecimalField())),
                     },
                     'aggregate_key':
                     'usage_quantity',
                     'annotations': {
                         'cost':
                         Sum(
                             Coalesce(F('pretax_cost'),
                                      Value(0,
                                            output_field=DecimalField())) +
                             Coalesce(F('markup_cost'),
                                      Value(0, output_field=DecimalField()))
                         ),
                         'infrastructure_cost':
                         Sum('pretax_cost'),
                         'derived_cost':
                         Value(0, output_field=DecimalField()),
                         'markup_cost':
                         Sum(
                             Coalesce(F('markup_cost'),
                                      Value(0,
                                            output_field=DecimalField()))),
                         'cost_units':
                         Coalesce(Max('currency'), Value('USD')),
                         'usage':
                         Sum('usage_quantity'),
                         # FIXME: Waiting on MSFT for usage_units default
                         'usage_units':
                         Coalesce(Max('unit_of_measure'),
                                  Value('Storage Type Placeholder'))
                     },
                     'delta_key': {
                         'usage': Sum('usage_quantity')
                     },
                     'filter': [{
                         'field': 'service_name',
                         'operation': 'contains',
                         'parameter': 'Storage'
                     }],
                     'cost_units_key':
                     'currency',
                     'cost_units_fallback':
                     'USD',
                     'usage_units_key':
                     'unit_of_measure',
                     'usage_units_fallback':
                     'Storage Type Placeholder',  # FIXME
                     'sum_columns': [
                         'usage', 'cost', 'infrastructure_cost',
                         'derived_cost', 'markup_cost'
                     ],
                     'default_ordering': {
                         'usage': 'desc'
                     },
                 },
                 # Tags report: only a default ordering is defined.
                 'tags': {
                     'default_ordering': {
                         'cost': 'desc'
                     },
                 },
             },
             'start_date':
             'costentrybill__billing_period_start',
             'tables': {
                 'query': AzureCostEntryLineItemDailySummary,
             },
         },
     ]
     super().__init__(provider, report_type)
Ejemplo n.º 7
0
    def do(self):
        """Snapshot daily statistics: general, per-department, per-union, per-region.

        Every row produced in a single run carries the same timestamp so
        the snapshots can be correlated later.
        """
        timestamp = timezone.now()  # make sure all entries share same timestamp

        # Confirmed, non-refunded payments — the base queryset for every
        # payment metric below (previously rebuilt at each use site).
        confirmed_payments = Payment.objects.filter(refunded_dtm=None, confirmed_dtm__isnull=False)

        # generate general statistics
        dailyStatisticsGeneral = DailyStatisticsGeneral()

        dailyStatisticsGeneral.timestamp = timestamp
        dailyStatisticsGeneral.persons = Person.objects.count()
        dailyStatisticsGeneral.children_male = Person.objects.filter(membertype=Person.CHILD, gender=Person.MALE).count()
        dailyStatisticsGeneral.children_female = Person.objects.filter(membertype=Person.CHILD, gender=Person.FEMALE).count()
        dailyStatisticsGeneral.children = dailyStatisticsGeneral.children_male + dailyStatisticsGeneral.children_female
        dailyStatisticsGeneral.volunteers_male = Person.objects.filter(gender=Person.MALE, volunteer__isnull=False).count()
        dailyStatisticsGeneral.volunteers_female = Person.objects.filter(gender=Person.FEMALE, volunteer__isnull=False).count()
        dailyStatisticsGeneral.volunteers = dailyStatisticsGeneral.volunteers_male + dailyStatisticsGeneral.volunteers_female
        dailyStatisticsGeneral.departments = Department.objects.filter(closed_dtm=None).count()
        dailyStatisticsGeneral.unions = Union.objects.count()
        dailyStatisticsGeneral.waitinglist_male = Person.objects.filter(waitinglist__isnull=False, gender=Person.MALE).distinct().count()
        dailyStatisticsGeneral.waitinglist_female = Person.objects.filter(waitinglist__isnull=False, gender=Person.FEMALE).distinct().count()
        dailyStatisticsGeneral.waitinglist = dailyStatisticsGeneral.waitinglist_male + dailyStatisticsGeneral.waitinglist_female
        # Families seen within the last 24 hours vs. not seen for over a year.
        dailyStatisticsGeneral.family_visits = Family.objects.filter(last_visit_dtm__gt=(timestamp-datetime.timedelta(days=1))).count()
        dailyStatisticsGeneral.dead_profiles = Family.objects.filter(last_visit_dtm__lt=(timestamp-datetime.timedelta(days=365))).count()
        dailyStatisticsGeneral.current_activity_participants = Person.objects.filter(member__activityparticipant__activity__end_date__gte=timestamp,
                                                                                     member__activityparticipant__activity__start_date__lte=timestamp).distinct().count()
        dailyStatisticsGeneral.activity_participants_male = Person.objects.filter(member__activityparticipant__activity__isnull=False, gender=Person.MALE).distinct().count()
        dailyStatisticsGeneral.activity_participants_female = Person.objects.filter(member__activityparticipant__activity__isnull=False, gender=Person.FEMALE).distinct().count()
        dailyStatisticsGeneral.activity_participants = dailyStatisticsGeneral.activity_participants_male + dailyStatisticsGeneral.activity_participants_female
        # Coalesce so a day with no payments records 0 instead of None.
        dailyStatisticsGeneral.payments = confirmed_payments.aggregate(sum=Coalesce(Sum('amount_ore'), 0))['sum']
        dailyStatisticsGeneral.payments_transactions = confirmed_payments.count()
        dailyStatisticsGeneral.save()

        # generate daily department statistics
        departments = Department.objects.filter(closed_dtm=None)
        for department in departments:
            dailyStatisticsDepartment = DailyStatisticsDepartment()

            dailyStatisticsDepartment.timestamp = timestamp
            dailyStatisticsDepartment.department = department
            dailyStatisticsDepartment.active_activities = Activity.objects.filter(department=department,
                                                                                  start_date__lte=timestamp,
                                                                                  end_date__gte=timestamp).count()
            dailyStatisticsDepartment.activities = Activity.objects.filter(department=department).count()
            dailyStatisticsDepartment.current_activity_participants = Person.objects.filter(member__activityparticipant__activity__start_date__lte=timestamp,
                                                                                            member__activityparticipant__activity__end_date__gte=timestamp,
                                                                                            member__activityparticipant__activity__department=department).distinct().count()
            dailyStatisticsDepartment.activity_participants = ActivityParticipant.objects.filter(activity__department=department).count()
            dailyStatisticsDepartment.members = 0  # TODO: too loosely defined now
            dailyStatisticsDepartment.waitinglist = Person.objects.filter(waitinglist__department=department).distinct().count()
            # Waiting time = age of the oldest waiting-list entry (0 if none).
            firstWaitingListItem = WaitingList.objects.filter(department=department).order_by('on_waiting_list_since').first()
            if firstWaitingListItem:
                dailyStatisticsDepartment.waitingtime = timestamp.date() - firstWaitingListItem.on_waiting_list_since
            else:
                dailyStatisticsDepartment.waitingtime = datetime.timedelta(days=0)
            dailyStatisticsDepartment.payments = confirmed_payments.filter(activity__department=department).aggregate(sum=Coalesce(Sum('amount_ore'), 0))['sum']
            dailyStatisticsDepartment.volunteers_male = Person.objects.filter(volunteer__department=department, gender=Person.MALE).distinct().count()
            dailyStatisticsDepartment.volunteers_female = Person.objects.filter(volunteer__department=department, gender=Person.FEMALE).distinct().count()
            dailyStatisticsDepartment.volunteers = dailyStatisticsDepartment.volunteers_male + dailyStatisticsDepartment.volunteers_female

            dailyStatisticsDepartment.save()

        # generate daily union statistics
        unions = Union.objects.all()
        for union in unions:
            dailyStatisticsUnion = DailyStatisticsUnion()

            dailyStatisticsUnion.timestamp = timestamp
            dailyStatisticsUnion.union = union
            dailyStatisticsUnion.departments = Department.objects.filter(union=union).count()
            dailyStatisticsUnion.active_activities = Activity.objects.filter(department__union=union,
                                                                             start_date__lte=timestamp,
                                                                             end_date__gte=timestamp).count()
            dailyStatisticsUnion.activities = Activity.objects.filter(department__union=union).count()
            dailyStatisticsUnion.current_activity_participants = Person.objects.filter(member__activityparticipant__activity__start_date__lte=timestamp,
                                                                                       member__activityparticipant__activity__end_date__gte=timestamp,
                                                                                       member__activityparticipant__activity__department__union=union).distinct().count()
            dailyStatisticsUnion.activity_participants = ActivityParticipant.objects.filter(activity__department__union=union).count()
            dailyStatisticsUnion.members = 0  # TODO: too loosely defined now
            dailyStatisticsUnion.waitinglist = Person.objects.filter(waitinglist__department__union=union).distinct().count()
            dailyStatisticsUnion.payments = confirmed_payments.filter(activity__department__union=union).aggregate(sum=Coalesce(Sum('amount_ore'), 0))['sum']
            dailyStatisticsUnion.volunteers_male = Person.objects.filter(volunteer__department__union=union, gender=Person.MALE).distinct().count()
            dailyStatisticsUnion.volunteers_female = Person.objects.filter(volunteer__department__union=union, gender=Person.FEMALE).distinct().count()
            dailyStatisticsUnion.volunteers = dailyStatisticsUnion.volunteers_male + dailyStatisticsUnion.volunteers_female

            dailyStatisticsUnion.save()

        # generate daily region statistics
        regions = ('DK01', 'DK02', 'DK03', 'DK04', 'DK05')
        for region in regions:
            dailyStatisticsRegion = DailyStatisticsRegion()

            # There is no easy foreign key to identify a region, so match
            # departments through their zipcodes instead.
            zipsInRegion = ZipcodeRegion.objects.filter(region=region).values_list('zipcode', flat=True)

            dailyStatisticsRegion.timestamp = timestamp
            dailyStatisticsRegion.region = region
            # No unions - since unions may span regions
            dailyStatisticsRegion.departments = Department.objects.annotate().filter(zipcode__in=zipsInRegion).count()
            dailyStatisticsRegion.active_activities = Activity.objects.filter(department__zipcode__in=zipsInRegion,
                                                                              start_date__lte=timestamp,
                                                                              end_date__gte=timestamp).count()
            dailyStatisticsRegion.activities = Activity.objects.filter(department__zipcode__in=zipsInRegion).count()
            dailyStatisticsRegion.current_activity_participants = Person.objects.filter(member__activityparticipant__activity__start_date__lte=timestamp,
                                                                                        member__activityparticipant__activity__end_date__gte=timestamp,
                                                                                        member__activityparticipant__activity__department__zipcode__in=zipsInRegion).distinct().count()
            dailyStatisticsRegion.activity_participants = ActivityParticipant.objects.filter(activity__department__zipcode__in=zipsInRegion).count()
            dailyStatisticsRegion.members = 0  # TODO: too loosely defined now
            dailyStatisticsRegion.waitinglist = Person.objects.filter(waitinglist__department__zipcode__in=zipsInRegion).distinct().count()
            dailyStatisticsRegion.payments = confirmed_payments.filter(activity__department__zipcode__in=zipsInRegion).aggregate(sum=Coalesce(Sum('amount_ore'), 0))['sum']
            dailyStatisticsRegion.volunteers_male = Person.objects.filter(volunteer__department__zipcode__in=zipsInRegion, gender=Person.MALE).distinct().count()
            dailyStatisticsRegion.volunteers_female = Person.objects.filter(volunteer__department__zipcode__in=zipsInRegion, gender=Person.FEMALE).distinct().count()
            dailyStatisticsRegion.volunteers = dailyStatisticsRegion.volunteers_male + dailyStatisticsRegion.volunteers_female

            dailyStatisticsRegion.save()
Ejemplo n.º 8
0
    def execute_individual_query(self, org_unit_applied=False):  # noqa: C901
        """Execute query and return provided data.

        Args:
            org_unit_applied (bool): when True, skip the rank/limit
                grouping step even if a result limit is configured.

        Returns:
            (Dict): Dictionary response of query params, data, and total

        """
        data = []

        with tenant_context(self.tenant):
            query_table = self.query_table
            LOG.debug(f"Using query table: {query_table}")
            tag_results = None
            # Base queryset: filter, then attach the mapper's annotations.
            query = query_table.objects.filter(self.query_filter)
            query_data = query.annotate(**self.annotations)
            query_group_by = ["date"] + self._get_group_by()
            query_order_by = ["-date"]
            query_order_by.extend([self.order])  # add implicit ordering

            # Deep-copy so popping keys below never mutates the shared
            # report-type map on the mapper.
            annotations = copy.deepcopy(
                self._mapper.report_type_map.get("annotations", {}))
            if not self.parameters.parameters.get("compute_count"):
                # Query parameter indicates count should be removed from DB queries
                annotations.pop("count", None)
                annotations.pop("count_units", None)

            # Group rows by date + requested dimensions and aggregate.
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            if "account" in query_group_by:
                # Prefer the provider's account alias; fall back to the
                # raw usage_account_id when the alias is NULL.
                query_data = query_data.annotate(account_alias=Coalesce(
                    F(self._mapper.provider_map.get("alias")),
                    "usage_account_id"))

                if self.parameters.parameters.get("check_tags"):
                    tag_results = self._get_associated_tags(
                        query_table, self.query_filter)

            # Overall totals computed from the unaggregated queryset.
            query_sum = self._build_sum(query, annotations)

            if self._limit and query_data and not org_unit_applied:
                query_data = self._group_by_ranks(query, query_data)
                if not self.parameters.get("order_by"):
                    # override implicit ordering when using ranked ordering.
                    query_order_by[-1] = "rank"

            if self._delta:
                query_data = self.add_deltas(query_data, query_sum)

            query_data = self.order_by(query_data, query_order_by)

            # Fetch the data (returning list(dict))
            query_results = list(query_data)

            # Resolve tag exists for unique account returned
            # if tag_results is not Falsey
            # Append the flag to the query result for the report
            if tag_results is not None:
                # Add the tag results to the report query result dicts
                for res in query_results:
                    res["tags_exist"] = tag_results.get(
                        res["account_alias"], False)

            if not self.is_csv_output:
                # Nest rows under their non-date group keys and reshape
                # for JSON output; CSV output stays flat.
                groups = copy.deepcopy(query_group_by)
                groups.remove("date")
                data = self._apply_group_by(query_results, groups)
                data = self._transform_data(query_group_by, 0, data)
            else:
                data = query_results

        # Re-key the totals dict: "units" first, then annotation keys in
        # order, then any remaining keys from query_sum.
        key_order = list(["units"] + list(annotations.keys()))
        ordered_total = {
            total_key: query_sum[total_key]
            for total_key in key_order if total_key in query_sum
        }
        ordered_total.update(query_sum)

        query_sum = ordered_total
        query_data = data
        return query_data, query_sum
Ejemplo n.º 9
0
class RackRoleViewSet(ModelViewSet):
    """CRUD API endpoint for rack roles.

    Each role is annotated with ``rack_count``; Coalesce ensures roles
    with no racks report 0 rather than NULL.
    """
    serializer_class = serializers.RackRoleSerializer
    filterset_class = filters.RackRoleFilterSet
    queryset = RackRole.objects.annotate(
        rack_count=Coalesce(get_subquery(Rack, 'role'), 0)
    )
Ejemplo n.º 10
0
 def goals_lost(self, obj):
     """Return the total number of goals conceded by team *obj*.

     Counts goals scored against the team both when hosting (opponent's
     guest_team_goals) and when visiting (host_team_goals); empty match
     sets contribute 0 via Coalesce.
     """
     conceded_at_home = Match.objects.filter(host_team=obj).aggregate(
         goals=Coalesce(Sum('guest_team_goals', output_field=IntegerField()), 0))
     conceded_away = Match.objects.filter(guest_team=obj).aggregate(
         goals=Coalesce(Sum('host_team_goals', output_field=IntegerField()), 0))
     return conceded_at_home['goals'] + conceded_away['goals']
Ejemplo n.º 11
0
def ajax_upload(request, folder_id=None):
    """Handle an AJAX file upload into the filer folder tree.

    Checks the user's permissions (optionally creating intermediate folders
    from the ``path`` POST parameter), saves the uploaded file, attaches it
    to a new or existing ``FileGrouper``, creates a version for it and
    returns a JSON payload with thumbnail/preview data. Any failure is
    reported as ``JsonResponse({'error': ...})``.
    """
    folder = None
    path = request.POST.get('path')
    path_split = path.split('/') if path else []

    # check permissions and data
    error_msg = None
    if not request.user.is_authenticated:
        # User is not logged in. Return a generic message that gives
        # no data info (such as whether a folder exists or not)
        error_msg = filer.admin.clipboardadmin.NO_PERMISSIONS_FOR_FOLDER
    elif folder_id:
        try:
            folder = Folder.objects.get(pk=folder_id)
        except Folder.DoesNotExist:
            # A folder with id=folder_id does not exist so return
            # an error message specifying this
            error_msg = filer.admin.clipboardadmin.NO_FOLDER_ERROR
        else:
            # Now check if the user has sufficient permissions to
            # upload a file to the folder with id=folder_id and return
            # an error message if not
            no_folder_perms = (not folder.has_add_children_permission(request)
                               or (path and not folder.can_have_subfolders))
            if no_folder_perms:
                error_msg = filer.admin.clipboardadmin.NO_PERMISSIONS_FOR_FOLDER
    elif (not request.user.is_superuser and path_split and
          not filer.settings.FILER_ALLOW_REGULAR_USERS_TO_ADD_ROOT_FOLDERS):
        # If uploading the file to Unsorted Uploads (i.e. no folder_id)
        # but filer is set to disallow regular users to add
        # folders there and the user is not a superuser and is uploading
        # folders that aren't yet created on the server (i.e.
        # specifying the path param with folders that don't yet exist)
        # return an error message
        if not Folder.objects.filter(name=path_split[0], parent=None).exists():
            error_msg = filer.admin.clipboardadmin.NO_PERMISSIONS_FOR_FOLDER

    if error_msg:
        return JsonResponse({'error': error_msg})

    try:
        if len(request.FILES) == 1:
            # dont check if request is ajax or not, just grab the file
            upload, filename, is_raw, mime_type = handle_request_files_upload(
                request)
        else:
            # else process the request as usual
            upload, filename, is_raw, mime_type = handle_upload(request)

        # find the file type
        # The first registered filer model that claims the file wins.
        for filer_class in filer_settings.FILER_FILE_MODELS:
            FileSubClass = load_model(filer_class)
            # TODO: What if there are more than one that qualify?
            if FileSubClass.matches_file_type(filename, upload, mime_type):
                FileForm = modelform_factory(model=FileSubClass,
                                             fields=('original_filename',
                                                     'owner', 'file'))
                break
        uploadform = FileForm(
            {
                'original_filename': filename,
                'owner': request.user.pk
            }, {'file': upload})
        if uploadform.is_valid():
            file_obj = uploadform.save(commit=False)
            # Enforce the FILER_IS_PUBLIC_DEFAULT
            file_obj.is_public = filer_settings.FILER_IS_PUBLIC_DEFAULT

            # Set the file's folder
            # Walk/create the folder hierarchy described by `path`,
            # checking permissions at every level.
            current_folder = folder
            for segment in path_split:
                try:
                    current_folder = Folder.objects.get(name=segment,
                                                        parent=current_folder)
                except Folder.DoesNotExist:
                    # If the current_folder can't have subfolders then
                    # return a permission error
                    if current_folder and not current_folder.can_have_subfolders:
                        error_msg = filer.admin.clipboardadmin.NO_PERMISSIONS_FOR_FOLDER
                        return JsonResponse({'error': error_msg})
                    current_folder = Folder.objects.create(
                        name=segment, parent=current_folder)
                else:
                    # If the folder already exists, check the user is
                    # allowed to upload here
                    if not current_folder.has_add_children_permission(request):
                        error_msg = filer.admin.clipboardadmin.NO_PERMISSIONS_FOR_FOLDER
                        return JsonResponse({'error': error_msg})
            file_obj.folder = current_folder
            file_obj.mime_type = mime_type

            # Look for an existing file in the *target* folder with the same
            # display label (name, falling back to original filename, then a
            # placeholder) so the upload becomes a new version of it.
            same_name_file_qs = get_files_distinct_grouper_queryset().annotate(
                _name=NullIfEmptyStr('name'),
                _original_filename=NullIfEmptyStr('original_filename'),
            ).annotate(
                # seperate annotate is needed to get it work on python<36
                # see PEP 468 for more details
                _label=Coalesce('_name', '_original_filename',
                                Value('unnamed file')), ).filter(
                                    folder=folder, _label=file_obj.label)
            existing_file_obj = same_name_file_qs.first()

            if existing_file_obj:
                file_grouper = existing_file_obj.grouper
                new_file_grouper = False

                # A draft version of the same file must be archivable by this
                # user before we can add a new version on top of it.
                existing_file_version = Version.objects.get_for_content(
                    existing_file_obj)
                if existing_file_version.state == DRAFT and not all([
                        existing_file_version.can_be_archived(),
                        existing_file_version.check_archive.as_bool(
                            request.user),
                ]):
                    return JsonResponse({
                        'error':
                        ('Cannot archive existing {} file version'.format(
                            existing_file_obj))
                    })
            else:
                new_file_grouper = True
                file_grouper = FileGrouper.objects.create()

            file_obj.grouper = file_grouper
            file_obj.save()
            create_file_version(file_obj, request.user)

            # Try to generate thumbnails.
            if not file_obj.icons:
                # There is no point to continue, as we can't generate
                # thumbnails for this file. Usual reasons: bad format or
                # filename.
                # Roll back the file (and its grouper if we created one).
                file_obj.delete()
                if new_file_grouper:
                    file_grouper.delete()
                # This would be logged in BaseImage._generate_thumbnails()
                # if FILER_ENABLE_LOGGING is on.
                return JsonResponse(
                    {'error': 'failed to generate icons for file'},
                    status=500,
                )
            thumbnail = None
            # Backwards compatibility: try to get specific icon size (32px)
            # first. Then try medium icon size (they are already sorted),
            # fallback to the first (smallest) configured icon.
            for size in (['32'] +
                         filer_settings.FILER_ADMIN_ICON_SIZES[1::-1]):
                try:
                    thumbnail = file_obj.icons[size]
                    break
                except KeyError:
                    continue

            data = {
                'thumbnail': thumbnail,
                'alt_text': '',
                'label': str(file_obj),
                'file_id': file_obj.pk,
                'grouper_id': file_grouper.pk,
            }
            # prepare preview thumbnail
            if type(file_obj) == Image:
                thumbnail_180_options = {
                    'size': (180, 180),
                    'crop': True,
                    'upscale': True,
                }
                thumbnail_180 = file_obj.file.get_thumbnail(
                    thumbnail_180_options)
                data['thumbnail_180'] = thumbnail_180.url
                data['original_image'] = file_obj.url
            return JsonResponse(data)
        else:
            form_errors = '; '.join([
                '%s: %s' % (field, ', '.join(errors))
                for field, errors in list(uploadform.errors.items())
            ])
            raise UploadException("AJAX request not valid: form invalid '%s'" %
                                  (form_errors, ))
    except UploadException as e:
        # TODO: Test
        return JsonResponse({'error': str(e)}, status=500)
Ejemplo n.º 12
0
class CheckInFilterForm(FilterForm):
    """Filter form for the check-in list.

    Offers a free-text attendee search, a checked-in status filter, a product
    filter and a set of custom sort orders (``orders``).
    """

    # Maps an ordering key to either a tuple of order_by() arguments, or a
    # dict containing an '_order' expression plus the annotations it needs.
    orders = {
        'code': ('order__code', 'item__name'),
        '-code': ('-order__code', '-item__name'),
        'email': ('order__email', 'item__name'),
        '-email': ('-order__email', '-item__name'),
        'status': (
            FixedOrderBy(F('last_checked_in'), nulls_first=True,
                         descending=True),
            'order__code',
        ),
        '-status': (
            FixedOrderBy(F('last_checked_in'), nulls_last=True),
            '-order__code',
        ),
        'timestamp': (
            FixedOrderBy(F('last_checked_in'), nulls_first=True),
            'order__code',
        ),
        '-timestamp': (
            FixedOrderBy(F('last_checked_in'), nulls_last=True,
                         descending=True),
            '-order__code',
        ),
        'item': ('item__name', 'variation__value', 'order__code'),
        '-item': ('-item__name', '-variation__value', '-order__code'),
        'name': {
            '_order': F('display_name').asc(nulls_first=True),
            'display_name': Coalesce('attendee_name_cached',
                                     'addon_to__attendee_name_cached'),
        },
        '-name': {
            '_order': F('display_name').desc(nulls_last=True),
            'display_name': Coalesce('attendee_name_cached',
                                     'addon_to__attendee_name_cached'),
        },
    }

    user = forms.CharField(
        label=_('Search attendee…'),
        widget=forms.TextInput(attrs={
            'placeholder': _('Search attendee…'),
            'autofocus': 'autofocus'
        }),
        required=False)
    status = forms.ChoiceField(
        label=_('Check-in status'),
        choices=(
            ('', _('All attendees')),
            ('1', _('Checked in')),
            ('0', _('Not checked in')),
        ),
        required=False,
    )
    item = forms.ModelChoiceField(
        label=_('Products'),
        queryset=Item.objects.none(),
        required=False,
        empty_label=_('All products'))

    def __init__(self, *args, **kwargs):
        self.event = kwargs.pop('event')
        self.list = kwargs.pop('list')
        super().__init__(*args, **kwargs)
        # Restrict product choices to the products this check-in list covers.
        if self.list.all_products:
            item_qs = self.event.items.all()
        else:
            item_qs = self.list.limit_products.all()
        self.fields['item'].queryset = item_qs

    def filter_qs(self, qs):
        """Apply the cleaned form data as filters and ordering to *qs*."""
        data = self.cleaned_data

        term = data.get('user')
        if term:
            qs = qs.filter(
                Q(order__code__istartswith=term)
                | Q(secret__istartswith=term)
                | Q(order__email__icontains=term)
                | Q(attendee_name_cached__icontains=term)
                | Q(attendee_email__icontains=term)
                | Q(voucher__code__istartswith=term)
                | Q(order__invoice_address__name_cached__icontains=term)
                | Q(order__invoice_address__company__icontains=term))

        checkin_state = data.get('status')
        if checkin_state == '1':
            qs = qs.filter(last_checked_in__isnull=False)
        elif checkin_state == '0':
            qs = qs.filter(last_checked_in__isnull=True)

        if data.get('ordering'):
            ordering = self.orders[data.get('ordering')]
            if isinstance(ordering, dict):
                annotations = dict(ordering)
                order_expr = annotations.pop('_order')
                qs = qs.annotate(**annotations).order_by(order_expr)
            elif isinstance(ordering, (list, tuple)):
                qs = qs.order_by(*ordering)
            else:
                qs = qs.order_by(ordering)

        if data.get('item'):
            qs = qs.filter(item=data.get('item'))

        return qs
Ejemplo n.º 13
0
class OrderPositionViewSet(mixins.DestroyModelMixin,
                           viewsets.ReadOnlyModelViewSet):
    """Read-only (plus DELETE) API endpoint for the order positions of an
    event.

    Supports filtering, custom attendee-name ordering, ticket download via
    the ``download`` action and cancellation of single positions via DELETE.
    """
    serializer_class = OrderPositionSerializer
    queryset = OrderPosition.objects.none()
    filter_backends = (DjangoFilterBackend, OrderingFilter)
    ordering = ('order__datetime', 'positionid')
    ordering_fields = (
        'order__code',
        'order__datetime',
        'positionid',
        'attendee_name',
        'order__status',
    )
    filterset_class = OrderPositionFilter
    permission = 'can_view_orders'
    write_permission = 'can_change_orders'
    # Attendee names may live on the add-on parent position, so name ordering
    # sorts on a coalesced annotation instead of the raw column.
    ordering_custom = {
        'attendee_name': {
            '_order':
            F('display_name').asc(nulls_first=True),
            'display_name':
            Coalesce('attendee_name_cached', 'addon_to__attendee_name_cached')
        },
        '-attendee_name': {
            # BUGFIX: this previously used .asc(nulls_last=True), which made
            # '-attendee_name' sort identically to 'attendee_name'.
            '_order':
            F('display_name').desc(nulls_last=True),
            'display_name':
            Coalesce('attendee_name_cached', 'addon_to__attendee_name_cached')
        },
    }

    def get_queryset(self):
        """Return the event's positions, with prefetches tuned for the
        requested representation (full PDF data vs. the default listing)."""
        qs = OrderPosition.objects.filter(order__event=self.request.event)
        if self.request.query_params.get('pdf_data', 'false') == 'true':
            # PDF rendering needs the whole order graph (all positions,
            # answers, event/organizer), so prefetch it in one go.
            qs = qs.prefetch_related(
                'checkins', 'answers', 'answers__options', 'answers__question',
                Prefetch(
                    'addons',
                    OrderPosition.objects.select_related('item', 'variation')),
                Prefetch(
                    'order',
                    Order.objects.select_related(
                        'invoice_address').prefetch_related(
                            Prefetch(
                                'event',
                                Event.objects.select_related('organizer')),
                            Prefetch(
                                'positions',
                                OrderPosition.objects.prefetch_related(
                                    'checkins',
                                    'item',
                                    'variation',
                                    'answers',
                                    'answers__options',
                                    'answers__question',
                                ))))).select_related('item', 'variation',
                                                     'item__category',
                                                     'addon_to')
        else:
            qs = qs.prefetch_related('checkins', 'answers', 'answers__options',
                                     'answers__question').select_related(
                                         'item', 'order', 'order__event',
                                         'order__event__organizer')
        return qs

    def _get_output_provider(self, identifier):
        """Resolve a registered ticket output provider by its identifier.

        Raises NotFound when no provider matches.
        """
        responses = register_ticket_outputs.send(self.request.event)
        for receiver, response in responses:
            prov = response(self.request.event)
            if prov.identifier == identifier:
                return prov
        raise NotFound('Unknown output provider.')

    @action(detail=True,
            url_name='download',
            url_path='download/(?P<output>[^/]+)')
    def download(self, request, output, **kwargs):
        """Stream the rendered ticket for this position.

        Triggers asynchronous generation (and raises RetryException) when no
        cached ticket file exists yet.
        """
        provider = self._get_output_provider(output)
        pos = self.get_object()

        if pos.order.status != Order.STATUS_PAID:
            raise PermissionDenied(
                "Downloads are not available for unpaid orders.")
        if not pos.generate_ticket:
            raise PermissionDenied(
                "Downloads are not enabled for this product.")

        ct = CachedTicket.objects.filter(order_position=pos,
                                         provider=provider.identifier,
                                         file__isnull=False).last()
        if not ct or not ct.file:
            # No cached file yet: kick off generation and tell the client
            # to retry.
            generate.apply_async(args=('orderposition', pos.pk,
                                       provider.identifier))
            raise RetryException()
        else:
            resp = FileResponse(ct.file.file, content_type=ct.type)
            resp[
                'Content-Disposition'] = 'attachment; filename="{}-{}-{}-{}{}"'.format(
                    self.request.event.slug.upper(), pos.order.code,
                    pos.positionid, provider.identifier, ct.extension)
            return resp

    def perform_destroy(self, instance):
        """Cancel a single position through the order change machinery so
        quotas and invoices stay consistent; surface failures as 400s."""
        try:
            ocm = OrderChangeManager(
                instance.order,
                user=self.request.user
                if self.request.user.is_authenticated else None,
                auth=self.request.auth,
                notify=False)
            ocm.cancel(instance)
            ocm.commit()
        except OrderError as e:
            raise ValidationError(str(e))
        except Quota.QuotaExceededException as e:
            raise ValidationError(str(e))
Ejemplo n.º 14
0
 def annotate_quantities(self):
     """Annotate each row with its total stock quantity and its allocated
     quantity, defaulting both to 0 when no stock records exist."""
     total_stock = Coalesce(Sum("stocks__quantity"), 0)
     total_allocated = Coalesce(
         Sum("stocks__allocations__quantity_allocated"), 0)
     return self.annotate(quantity=total_stock,
                          quantity_allocated=total_allocated)
Ejemplo n.º 15
0
def homepage_dashboard_view(request):
    """Render the home dashboard.

    Aggregates global counters (assets, scans, engines, rules, findings),
    grade repartitions for assets and asset groups, the most critical
    assets/groups/findings, CVSS score buckets and the top-10 CVE/CWE
    lists, then renders 'home-dashboard.html' with that context.
    """
    findings = Finding.objects.all().only("status", "severity")
    assets = Asset.objects.all()
    global_stats = {
        "assets": {
            "total": assets.count(),
            "total_ag": AssetGroup.objects.all().count(),
        },
        "asset_types": {},
        "findings": {},
        "scans": {
            "defined":
            ScanDefinition.objects.all().count(),
            "performed":
            Scan.objects.all().count(),
            "active_periodic":
            ScanDefinition.objects.filter(enabled=True,
                                          scan_type='periodic').count(),
        },
        "engines": {
            "total":
            EngineInstance.objects.all().count(),
            "policies":
            EnginePolicy.objects.all().count(),
            "active":
            EngineInstance.objects.filter(status='READY',
                                          enabled=True).count(),
        },
        "rules": {
            "total": Rule.objects.all().count(),
            "active": Rule.objects.filter(enabled=True).count(),
            "nb_matches": 0,
        },
    }

    # Asset type repartition: one conditional-count expression per known type,
    # evaluated in a single aggregate query.
    asset_types_stats_params = {
        at[0]: Coalesce(
            Sum(Case(When(type=at[0], then=1)),
                output_field=models.IntegerField()), 0)
        for at in ASSET_TYPES
    }
    global_stats["asset_types"] = assets.aggregate(**asset_types_stats_params)

    # Finding counters (by status/severity), in a single aggregate query.
    findings_stats = findings.aggregate(
        nb_new=Coalesce(
            Sum(Case(When(status='new', then=1)),
                output_field=models.IntegerField()), 0),
        nb_critical=Coalesce(
            Sum(Case(When(severity='critical', then=1)),
                output_field=models.IntegerField()), 0),
        nb_high=Coalesce(
            Sum(Case(When(severity='high', then=1)),
                output_field=models.IntegerField()), 0),
        nb_medium=Coalesce(
            Sum(Case(When(severity='medium', then=1)),
                output_field=models.IntegerField()), 0),
        nb_low=Coalesce(
            Sum(Case(When(severity='low', then=1)),
                output_field=models.IntegerField()), 0),
        nb_info=Coalesce(
            Sum(Case(When(severity='info', then=1)),
                output_field=models.IntegerField()), 0),
    )
    global_stats["findings"] = {
        "total": findings.count(),
        "new": findings_stats["nb_new"],
        "critical": findings_stats["nb_critical"],
        "high": findings_stats["nb_high"],
        "medium": findings_stats["nb_medium"],
        "low": findings_stats["nb_low"],
        "info": findings_stats["nb_info"],
    }

    # Total rule matches across all rules.
    global_stats["rules"]["nb_matches"] = sum(
        r.nb_matches for r in Rule.objects.all())

    # Last 6 findings (oldest of the six first, for display order)
    last_findings = Finding.objects.all().order_by('-id')[:6][::-1]

    # Last 6 scans
    last_scans = Scan.objects.all().order_by('-started_at')[:6]

    # Grade repartition maps: grade -> per-criticity counters.
    asset_grades_map = {
        grade: {"high": 0, "medium": 0, "low": 0}
        for grade in ("A", "B", "C", "D", "E", "F", "-")
    }
    assetgroup_grades_map = copy.deepcopy(asset_grades_map)

    # Asset grades and risk scores
    assets_risk_scores = {}
    for asset in assets.only("risk_level", "criticity", "id"):
        asset_grades_map[asset.risk_level["grade"]][asset.criticity] += 1
        assets_risk_scores[asset.id] = asset.get_risk_score()

    top_critical_assets_scores = sorted(assets_risk_scores.items(),
                                        key=operator.itemgetter(1))[::-1][:6]

    tcas_id_list = [aid for aid, score in top_critical_assets_scores]
    top_critical_assets = list(assets.filter(id__in=tcas_id_list))
    top_critical_assets.sort(key=lambda t: tcas_id_list.index(t.id))

    # Format to a list of single-key dicts, sorted by grade
    asset_grades_map_list = [{key: asset_grades_map[key]}
                             for key in sorted(asset_grades_map.keys())]

    # Asset groups: same treatment as assets
    assetgroups_risk_scores = {}
    ags = AssetGroup.objects.all().only("risk_level", "criticity", "id",
                                        "name")
    for assetgroup in ags:
        assetgroup_grades_map[assetgroup.risk_level["grade"]][
            assetgroup.criticity] += 1
        assetgroups_risk_scores[assetgroup.id] = assetgroup.get_risk_score()

    top_critical_assetgroups_scores = sorted(
        assetgroups_risk_scores.items(), key=operator.itemgetter(1))[::-1][:6]
    tcags_id_list = [gid for gid, score in top_critical_assetgroups_scores]
    top_critical_assetgroups = list(ags.filter(id__in=tcags_id_list))
    top_critical_assetgroups.sort(key=lambda t: tcags_id_list.index(t.id))

    assetgroup_grades_map_list = [{key: assetgroup_grades_map[key]}
                                  for key in sorted(
                                      assetgroup_grades_map.keys())]

    # Up to MAX_CF most critical findings, filled by decreasing severity.
    # (Previously five copy-pasted loops whose `<= MAX_CF` test could
    # collect MAX_CF + 1 entries; fixed to stop at exactly MAX_CF.)
    top_critical_findings = []
    MAX_CF = 6
    for severity in ("critical", "high", "medium", "low", "info"):
        if len(top_critical_findings) >= MAX_CF:
            break
        for finding in findings.filter(severity=severity).only(
                "id", "severity", "title", "asset_name"):
            if len(top_critical_findings) >= MAX_CF:
                break
            top_critical_findings.append(finding)

    # CVSS buckets. Note: buckets deliberately overlap at the boundaries
    # (e.g. a score of exactly 7.0 counts in both '5to7' and 'gte7').
    cvss_scores = {'lte5': 0, '5to7': 0, 'gte7': 0, 'gte9': 0, 'eq10': 0}
    # NOTE(review): prefetch_related on a JSON key path looks suspicious and
    # may fail when the queryset is evaluated — confirm against the Django
    # version in use.
    for finding in findings.prefetch_related(
            "risk_info__cvss_base_score").only("risk_info"):
        score = finding.risk_info["cvss_base_score"]
        if score < 5.0:
            cvss_scores['lte5'] += 1
        if 5.0 <= score <= 7.0:
            cvss_scores['5to7'] += 1
        if score >= 7.0:
            cvss_scores['gte7'] += 1
        if 9.0 <= score < 10:
            cvss_scores['gte9'] += 1
        if score == 10.0:
            cvss_scores['eq10'] += 1

    # CVE & CWE leaderboards over still-open findings.
    cve_list = {}
    cwe_list = {}

    finding_cves_list = Finding.objects.exclude(
        Q(vuln_refs__CVE__isnull=True)
        | Q(status__in=['mitigated', 'patched', 'closed', 'false-positive'])
    ).annotate(cvelist=KeyTextTransform("CVE", 'vuln_refs')).values('cvelist')
    finding_cwes_list = Finding.objects.exclude(
        Q(vuln_refs__CWE__isnull=True)
        | Q(status__in=['mitigated', 'patched', 'closed', 'false-positive'])
    ).annotate(cwelist=KeyTextTransform("CWE", 'vuln_refs')).values('cwelist')

    for finding_cves in finding_cves_list:
        if finding_cves['cvelist'] is not None:
            # The CVE reference is stored as a stringified Python list.
            for cve in ast.literal_eval(finding_cves['cvelist']):
                cve_list[cve] = cve_list.get(cve, 0) + 1

    for cwe_data in finding_cwes_list:
        cwe = list(cwe_data.values())[0]
        cwe_list[cwe] = cwe_list.get(cwe, 0) + 1

    cxe_stats = {
        'top_cve':
        sorted(cve_list.items(), key=lambda x: x[1], reverse=True)[:10],
        'top_cwe':
        sorted(cwe_list.items(), key=lambda x: x[1], reverse=True)[:10],
    }

    return render(
        request, 'home-dashboard.html', {
            'global_stats': global_stats,
            'last_findings': last_findings,
            'last_scans': last_scans,
            'asset_grades_map': asset_grades_map_list,
            'assetgroup_grades_map': assetgroup_grades_map_list,
            'top_critical_assets': top_critical_assets,
            'top_critical_assetgroups': top_critical_assetgroups,
            'top_critical_findings': top_critical_findings,
            'cvss_scores': cvss_scores,
            'cxe_stats': cxe_stats
        })
Ejemplo n.º 16
0
class DeviceTypeViewSet(CustomFieldModelViewSet):
    """API endpoint for device types, annotated with their instance count."""

    queryset = DeviceType.objects.prefetch_related('manufacturer', 'tags').annotate(
        # Subquery-based count avoids a JOIN-induced row explosion.
        device_count=Coalesce(get_subquery(Device, 'device_type'), 0),
    )
    serializer_class = serializers.DeviceTypeSerializer
    filterset_class = filters.DeviceTypeFilterSet
Ejemplo n.º 17
0
import requests

from django.db.backends.signals import connection_created
from django.dispatch import receiver

import math, sqlite3


@receiver(connection_created)
def extend_sqlite(connection=None, **kwargs):
    """On each new SQLite connection, expose math.sqrt as the SQL
    function sqrt(); other database vendors are left untouched."""
    if connection.vendor != 'sqlite':
        return
    # Surface exceptions raised inside the registered function when debugging.
    sqlite3.enable_callback_tracebacks(True)
    connection.connection.create_function("sqrt", 1, math.sqrt)


# Module-level base queryset: approved quotes annotated with their net score
# (sum of vote values, 0 when unvoted) and their total vote count.
quotes = Quote.objects.annotate(score=Coalesce(Sum('vote__value'), 0),
                                votes=Count('vote')).filter(approved=True)


def api(request):
    query = request.GET.get('q', '')
    tag = request.GET.get('tag', '')
    quote_list = quotes.annotate(
        upvotes=Count('vote', filter=Q(vote__value=1)),
        downvotes=Count(
            'vote', filter=Q(vote__value=-1))).filter(content__contains=query)
    if tag: quote_list = quote_list.filter(tags__name__in=[tag])
    fields = request.GET.get(
        'fields',
        "id,content,notes,upvotes,downvotes,score,votes,timestamp").split(",")
    if set(fields) - {
Ejemplo n.º 18
0
class DeviceRoleViewSet(ModelViewSet):
    """API endpoint for device roles, annotated with the number of devices
    and virtual machines assigned to each role."""

    queryset = DeviceRole.objects.annotate(
        # Subquery-based counts avoid JOIN-induced row explosions.
        device_count=Coalesce(get_subquery(Device, 'device_role'), 0),
        virtualmachine_count=Coalesce(get_subquery(VirtualMachine, 'role'), 0),
    )
    serializer_class = serializers.DeviceRoleSerializer
    filterset_class = filters.DeviceRoleFilterSet
Ejemplo n.º 19
0
 def balance(self):
     """Return the sum of all transaction amounts, or 0 when there are none."""
     totals = self.transactions.aggregate(total=Coalesce(Sum('amount'), 0))
     return totals['total']
Ejemplo n.º 20
0
class VirtualChassisViewSet(ModelViewSet):
    """API endpoint for virtual chassis, annotated with their member count."""

    queryset = VirtualChassis.objects.prefetch_related('tags').annotate(
        # Subquery-based count avoids a JOIN-induced row explosion.
        member_count=Coalesce(get_subquery(Device, 'virtual_chassis'), 0),
    )
    serializer_class = serializers.VirtualChassisSerializer
    filterset_class = filters.VirtualChassisFilterSet
Ejemplo n.º 21
0
 def get_max_order(self, request, obj=None):
     """Return the highest value of the default order field across all
     objects of the base model, or 0 when there are none."""
     result = self.base_model.objects.aggregate(
         max_order=Coalesce(Max(self.default_order_field), 0))
     return result['max_order']
Ejemplo n.º 22
0
class PowerPanelViewSet(ModelViewSet):
    """API endpoint for power panels, annotated with their feed count."""
    serializer_class = serializers.PowerPanelSerializer
    filterset_class = filters.PowerPanelFilterSet
    # Coalesce maps the NULL of an empty power-feed subquery to 0.
    queryset = PowerPanel.objects.prefetch_related('site', 'rack_group').annotate(
        powerfeed_count=Coalesce(get_subquery(PowerFeed, 'power_panel'), 0),
    )
Ejemplo n.º 23
0
    def annotate_with_numbers(qs, event):
        """
        Annotate a queryset of check-in lists with attendance statistics.

        Three integer annotations are added to every check-in list in ``qs``:

        * ``checkin_count``  -- number of check-ins recorded on the list
        * ``position_count`` -- number of order positions eligible for it
        * ``percent``        -- ``checkin_count * 100 / position_count``
          (0 when ``position_count`` is 0)

        The list's ``include_pending`` flag decides whether pending orders
        are counted in addition to paid ones, and ``all_products`` decides
        whether every position of the event counts or only positions whose
        item is among the list's ``limit_products``.

        :param qs: queryset of check-in lists belonging to ``event``
        :param event: the event whose orders and positions are counted
        :return: ``qs`` with the three annotations applied
        """
        # Import here to prevent circular import
        from . import Order, OrderPosition, Item

        # This is the mother of all subqueries. Sorry. I try to explain it, at least?
        # First, we prepare a subquery that for every check-in that belongs to a paid-order
        # position and to the list in question. Then, we check that it also belongs to the
        # correct subevent (just to be sure) and aggregate over lists (so, over everything,
        # since we filtered by lists).
        cqs_paid = Checkin.objects.filter(
            position__order__event=event,
            position__order__status=Order.STATUS_PAID,
            list=OuterRef('pk')
        ).filter(
            # This assumes that in an event with subevents, *all* positions have subevents
            # and *all* checkin lists have a subevent assigned
            Q(position__subevent=OuterRef('subevent'))
            | (Q(position__subevent__isnull=True))).order_by().values(
                'list').annotate(c=Count('*')).values('c')
        # Same as cqs_paid, but also counting check-ins on pending orders.
        cqs_paid_and_pending = Checkin.objects.filter(
            position__order__event=event,
            position__order__status__in=[
                Order.STATUS_PAID, Order.STATUS_PENDING
            ],
            list=OuterRef('pk')
        ).filter(
            # This assumes that in an event with subevents, *all* positions have subevents
            # and *all* checkin lists have a subevent assigned
            Q(position__subevent=OuterRef('subevent'))
            | (Q(position__subevent__isnull=True))).order_by().values(
                'list').annotate(c=Count('*')).values('c')

        # Now for the hard part: getting all order positions that contribute to this list. This
        # requires us to use TWO subqueries. The first one, pqs_all, will only be used for check-in
        # lists that contain all the products of the event. This is the simpler one, it basically
        # looks like the check-in counter above.
        pqs_all_paid = OrderPosition.objects.filter(
            order__event=event,
            order__status=Order.STATUS_PAID,
        ).filter(
            # This assumes that in an event with subevents, *all* positions have subevents
            # and *all* checkin lists have a subevent assigned
            Q(subevent=OuterRef('subevent'))
            | (Q(subevent__isnull=True))).order_by().values(
                'order__event').annotate(c=Count('*')).values('c')
        # Paid-or-pending variant of pqs_all_paid.
        pqs_all_paid_and_pending = OrderPosition.objects.filter(
            order__event=event,
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING]
        ).filter(
            # This assumes that in an event with subevents, *all* positions have subevents
            # and *all* checkin lists have a subevent assigned
            Q(subevent=OuterRef('subevent'))
            | (Q(subevent__isnull=True))).order_by().values(
                'order__event').annotate(c=Count('*')).values('c')

        # Now we need a subquery for the case of checkin lists that are limited to certain
        # products. We cannot use OuterRef("limit_products") since that would do a cross-product
        # with the products table and we'd get duplicate rows in the output with different annotations
        # on them, which isn't useful at all. Therefore, we need to add a second layer of subqueries
        # to retrieve all of those items and then check if the item_id is IN this subquery result.
        pqs_limited_paid = OrderPosition.objects.filter(
            order__event=event,
            order__status=Order.STATUS_PAID,
            item_id__in=Subquery(
                Item.objects.filter(
                    checkinlist__pk=OuterRef(OuterRef('pk'))).values('pk'))
        ).filter(
            # This assumes that in an event with subevents, *all* positions have subevents
            # and *all* checkin lists have a subevent assigned
            Q(subevent=OuterRef('subevent'))
            | (Q(subevent__isnull=True))).order_by().values(
                'order__event').annotate(c=Count('*')).values('c')
        # Paid-or-pending variant of pqs_limited_paid.
        pqs_limited_paid_and_pending = OrderPosition.objects.filter(
            order__event=event,
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING],
            item_id__in=Subquery(
                Item.objects.filter(
                    checkinlist__pk=OuterRef(OuterRef('pk'))).values('pk'))
        ).filter(
            # This assumes that in an event with subevents, *all* positions have subevents
            # and *all* checkin lists have a subevent assigned
            Q(subevent=OuterRef('subevent'))
            | (Q(subevent__isnull=True))).order_by().values(
                'order__event').annotate(c=Count('*')).values('c')

        # Finally, we put all of this together. We force empty subquery aggregates to 0 by using Coalesce()
        # and decide which subquery to use for this row. In the end, we compute an integer percentage in case
        # we want to display a progress bar.
        return qs.annotate(
            checkin_count=Coalesce(
                Case(When(include_pending=True,
                          then=Subquery(cqs_paid_and_pending,
                                        output_field=models.IntegerField())),
                     default=Subquery(cqs_paid,
                                      output_field=models.IntegerField()),
                     output_field=models.IntegerField()), 0),
            position_count=Coalesce(
                Case(When(all_products=True,
                          include_pending=False,
                          then=Subquery(pqs_all_paid,
                                        output_field=models.IntegerField())),
                     When(all_products=True,
                          include_pending=True,
                          then=Subquery(pqs_all_paid_and_pending,
                                        output_field=models.IntegerField())),
                     When(all_products=False,
                          include_pending=False,
                          then=Subquery(pqs_limited_paid,
                                        output_field=models.IntegerField())),
                     default=Subquery(pqs_limited_paid_and_pending,
                                      output_field=models.IntegerField()),
                     output_field=models.IntegerField()),
                0)).annotate(percent=Case(When(position_count__gt=0,
                                               then=F('checkin_count') * 100 /
                                               F('position_count')),
                                          default=0,
                                          output_field=models.IntegerField()))
Ejemplo n.º 24
0
def card_community(request, **kwargs):
    """
    Render the community page for a single question (flash card).

    Collects every public answer to the question, annotates each answer with
    the current user's own vote and with yes/no vote counts, and orders the
    answers by net score (yes minus no, highest first).

    :param request: the current HTTP request; ``request.user`` must have a
        related ``profile``
    :param kwargs: either ``idx`` (position of the card in the user's deck)
        or ``pk`` (primary key of the question)
    :return: rendered ``flashr/card_community.html`` response
    """
    profile = request.user.profile
    if 'idx' in kwargs:
        deck = Deck.objects.filter(profile=profile)
        # Resolve the single card in question via its position in the deck.
        question = deck.get(order_idx=kwargs['idx']).question
    else:
        question = Question.objects.get(pk=kwargs['pk'])

    card_tags = question.tags.all()

    # Cache all public answers for the current question, and all votes for
    # those answers.
    answers = Answer.objects.filter(
        public=True, question=question).prefetch_related('question')
    votes = Vote.objects.filter(
        answer__question=question).prefetch_related('answer')

    # Attach the current user's own vote (if any) to each answer.
    # (Fix: a previous unused `user_votes = votes.filter(profile=profile)`
    # assignment that was immediately overwritten has been removed.)
    user_votes = votes.filter(profile=profile,
                              answer_id=OuterRef('pk')).values('vote')
    answers = answers.annotate(user_vote=Subquery(user_votes))

    ## Count up all the votes and sort by the top Answers
    ## Step 1: Count up the yes' and the no's
    # 1. a) Subqueries: count votes per answer and per vote type; each one
    #    returns its count under the alias 'yes' or 'no'.
    count_yes = votes.filter(
        answer__id=OuterRef('pk'),
        vote=1).values('vote').annotate(yes=Count('vote')).values('yes')
    count_no = votes.filter(
        answer__id=OuterRef('pk'),
        vote=-1).values('vote').annotate(no=Count('vote')).values('no')
    # 1. b) Main outer queries: run the subqueries against every answer,
    #    saving the returned vote counts on each answer.
    # Note: Coalesce only replaces a NULL (no votes of that type) with 0.
    answers = answers.annotate(yes_count=Coalesce(
        Subquery(count_yes, output_field=IntegerField()), 0))
    answers = answers.annotate(
        no_count=Coalesce(Subquery(count_no, output_field=IntegerField()), 0))
    ## Step 2: Subtract no's from yes' and save as the total.
    ## (Coalesce producing 0 instead of None makes this arithmetic safe.)
    annotated = answers.annotate(total_vote=ExpressionWrapper(
        F('yes_count') - F('no_count'), output_field=IntegerField()))
    ## Step 3: Sort by the new total_vote field, highest count first.
    annotated_sorted = annotated.order_by(
        F('total_vote').desc(nulls_last=True))

    ## Notes on aggregate Sum vs annotate Count:
    # .aggregate() works for an individual answer but evaluates immediately,
    # so it cannot be used inside a subquery. Hence the separate yes/no Count
    # subqueries above, subtracted afterwards.

    values = {
        'question': question,
        'card_tags': card_tags,
        'answers': annotated_sorted
    }
    values['community'] = Answer.objects.filter(question=question,
                                                public=True).exists()
    with suppress(ObjectDoesNotExist):
        print('no valuse')
        values['current_answer'] = Answer.objects.get(author=profile,
                                                      question=question)
        print('curent answer value', values['current_answer'])
    with suppress(ObjectDoesNotExist):
        # Most recent pain record for this user/question pair, if any.
        values['pain'] = Pain.objects.filter(
            profile=profile, question=question).latest('time_stamp')
    return render(request, 'flashr/card_community.html', values)
Ejemplo n.º 25
0
    def get_participant_stats_csv_data(self, exercise_ids):
        """
        Build the columns and rows for a participant-statistics CSV export.

        For every exercise in ``exercise_ids`` the resulting row contains
        counts of the tracked participant actions (reported / deleted /
        clicked / replied / ...), the participant count, and the numbers of
        regular and phishing emails opened.

        :param exercise_ids: iterable of exercise primary keys to export
        :return: tuple ``(csv_columns, csv_rows)`` where ``csv_rows`` is a
            list of dicts keyed by the column names (plus ``id``)
        """

        exercise_qs = exercise_models.Exercise.objects.filter(
            id__in=exercise_ids)

        email_type_count_map = self.get_emails_count_map(exercise_qs)

        # Per-exercise participant count, correlated on the outer exercise pk.
        exercise_participants_qs = (Participant.objects.filter(
            exercise_id=OuterRef("pk")).values("exercise_id").annotate(
                participant_count=Count("exercise_id")))

        # Per-exercise counts of each tracked participant action.
        participant_actions_qs = (ParticipantAction.objects.filter(
            participant__exercise_id=OuterRef("pk")
        ).values("participant__exercise_id").annotate(
            dcount=Count("participant__exercise_id"),
            emails_reported=self.get_action_count("email_reported"),
            emails_deleted=self.get_action_count("email_deleted"),
            emails_linked_click=self.get_action_count("email_link_clicked"),
            # Case without a default: stays NULL for non-debrief exercises,
            # which the Coalesce(..., Value("N.A.")) below renders as "N.A.".
            clicked_training_link=Case(
                When(
                    participant__exercise__debrief=True,
                    then=(self.get_action_count("training_link_clicked")),
                )),
            email_opened_attachment=self.get_action_count(
                "email_attachment_download"),
            webpage_clicked=self.get_action_count("webpage_click"),
            emails_replied=self.get_action_count("email_replied"),
            code_skipped=self.get_action_count(
                "training_release_code_skipped"),
            code_correct=self.get_action_count(
                "training_release_code_correct"),
            code_incorrect=self.get_action_count(
                "training_release_code_incorrect"),
        ))

        csv_columns = [
            "title",
            "trial_version",
            "emails_opened",
            "phishing_emails_opened",
            "pos_reported",
            "pos_deleted",
            "neg_clicked_link",
            "neg_entered_detail",
            "neg_replied_to_phishing_email",
            "neg_opened_attachment",
            "code_entered",
            "code_skipped",
            "code_correct",
            "code_incorrect",
            "participant_count",
            "training_link_clicked",
        ]

        # Coalesce maps each empty-subquery NULL count to 0.
        rows_qs = exercise_qs.annotate(
            pos_reported=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "emails_reported"), 0),
            pos_deleted=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "emails_deleted"), 0),
            neg_clicked_link=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "emails_linked_click"),
                0,
            ),
            neg_entered_detail=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "webpage_clicked"), 0),
            neg_replied_to_phishing_email=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "emails_replied"), 0),
            neg_opened_attachment=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "email_opened_attachment"),
                0,
            ),
            code_skipped=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "code_skipped"), 0),
            code_correct=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "code_correct"), 0),
            code_incorrect=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "code_incorrect"), 0),
            code_entered=Coalesce(F("code_correct") + F("code_incorrect"), 0),
            # NOTE(review): unlike the other counts this is not wrapped in
            # Coalesce, so it can be None for exercises with no participants
            # -- confirm whether that is intended in the CSV output.
            participant_count=self.get_subquery_value(exercise_participants_qs,
                                                      "participant_count"),
            training_link_clicked=Coalesce(
                self.get_subquery_value(participant_actions_qs,
                                        "clicked_training_link"),
                Value("N.A."),
            ),
        ).values(
            "id",
            "title",
            "trial_version",
            "pos_reported",
            "pos_deleted",
            "neg_clicked_link",
            "neg_entered_detail",
            "neg_replied_to_phishing_email",
            "neg_opened_attachment",
            "code_entered",
            "code_skipped",
            "code_correct",
            "code_incorrect",
            "participant_count",
            "training_link_clicked",
        )

        csv_rows = list(rows_qs)

        # Merge in the pre-computed email-open counts, keyed by exercise id.
        for row in csv_rows:
            row["emails_opened"] = email_type_count_map[
                row["id"]]["regular_emails"]
            row["phishing_emails_opened"] = email_type_count_map[
                row["id"]]["phishing_emails"]

        return csv_columns, csv_rows
Ejemplo n.º 26
0
 def resolve_quantity_allocated(root, *_args):
     """GraphQL resolver: total quantity allocated across the stock's
     allocations, with Coalesce mapping the empty-set NULL to 0."""
     aggregated = root.allocations.aggregate(
         quantity_allocated=Coalesce(Sum("quantity_allocated"), 0))
     return aggregated["quantity_allocated"]
Ejemplo n.º 27
0
def dashboard(request, template="organizaciones/dashboard.html"):
    """
    Render the organizations dashboard with aggregated indicators.

    On POST, validates the filter form and stores year/organization/country
    in the session. The data shown is then filtered either by session values
    or by a ``pais`` GET parameter. All locally computed indicators are
    passed to the template via ``locals()``, so local variable names are part
    of the template contract and must not be renamed.

    Fix: ``filtro`` was previously unbound (NameError) when the session had
    no ``anio`` AND no ``pais`` GET parameter was given; it now defaults to
    a pair of empty querysets, yielding an empty dashboard instead.
    """
    if request.method == 'POST':
        mensaje = None
        form = OrganizacionesForm(request.POST)
        if form.is_valid():
            request.session['anio'] = form.cleaned_data['anio']
            request.session['organizacion'] = form.cleaned_data['organizacion']
            request.session['pais'] = form.cleaned_data['pais']

            mensaje = "Todas las variables estan correctamente :)"
            request.session['activo'] = True
            centinela = 1

        else:
            centinela = 0

    else:
        form = OrganizacionesForm()
        mensaje = "Existen alguno errores"

    # Default: empty evaluation/implementation result sets, so the code
    # below never hits an unbound `filtro`.
    filtro = [ResultadosEvaluacion.objects.none(),
              ResultadosImplementacion.objects.none()]
    if 'anio' not in request.session:
        if request.GET.get('pais', ''):
            filtro = []
            id_pais = request.GET.get('pais', '')
            a = ResultadosEvaluacion.objects.filter(organizacion__pais=id_pais)
            filtro.append(a)
            b = ResultadosImplementacion.objects.filter(
                organizacion__pais=id_pais)
            filtro.append(b)
    else:
        filtro = _queryset_filtrado(request)

    # Merge the organization ids from both result models.
    list_ev_org = []
    for x in filtro[0].values_list('organizacion', flat=True):
        list_ev_org.append(x)

    list_im_org = []
    for x in filtro[1].values_list('organizacion', flat=True):
        list_im_org.append(x)

    result_list = list(sorted(set(list_ev_org + list_im_org)))

    # --------
    orgs = Organizacion.objects.filter(id__in=result_list)

    # Producer/supplier totals split by gender, activity and age group.
    total_hombres = ProductoresProveedores.objects.filter(
        organizacion__in=result_list).aggregate(
            total=Sum('total_hombre'))['total']
    total_mujeres = ProductoresProveedores.objects.filter(
        organizacion__in=result_list).aggregate(
            total=Sum('total_mujer'))['total']

    activos_hombres = ProductoresProveedores.objects.filter(
        organizacion__in=result_list).aggregate(
            total=Sum('activos_hombre'))['total']
    activos_mujeres = ProductoresProveedores.objects.filter(
        organizacion__in=result_list).aggregate(
            total=Sum('activos_mujer'))['total']

    jovenes_hombres = ProductoresProveedores.objects.filter(
        organizacion__in=result_list).aggregate(
            total=Sum('jovenes_hombre'))['total']
    jovenes_mujeres = ProductoresProveedores.objects.filter(
        organizacion__in=result_list).aggregate(
            total=Sum('jovenes_mujer'))['total']

    # Full-time organization employees (question 22).
    empleados_org = EmpleadosOrganizacion.objects.filter(
        organizacion__in=result_list,
        opcion=1).aggregate(total_hombre=Sum('total_hombre'),
                            total_mujer=Sum('total_mujer'),
                            adultos_hombre=Sum('adultos_hombre'),
                            adultos_mujer=Sum('adultos_mujer'),
                            jovenes_hombre=Sum('jovenes_hombre'),
                            jovenes_mujer=Sum('jovenes_mujer'))

    # Services and products: per-sector counts plus product frequencies.
    sectores = {}
    for obj in CHOICE_SECTOR:
        conteo = SectoresProductos.objects.filter(organizacion__in=result_list,
                                                  sector=obj[0]).count()

        productos = []
        for x in Productos.objects.filter(sector__sector=obj[0]):
            prod = ProductosOrg.objects.filter(id=x.producto1.id)
            productos.append((prod, prod.count()))

        sectores[obj[1]] = conteo, productos

    # Legal and organizational status of the organizations.
    personeria = Organizacion.objects.filter(id__in=result_list,
                                             personeria=1).count()
    en_operaciones = Organizacion.objects.filter(id__in=result_list,
                                                 en_operaciones=1).count()
    apoyo = Organizacion.objects.filter(id__in=result_list, apoyo=1).count()

    # Bar-chart data: per-option production quantity for each crop type
    # (1 = coffee, 2 = cacao, 3 = vegetables); Coalesce maps NULL sums to 0.
    grafo_barra_cultivo = collections.OrderedDict()
    for obj in CHOICES_34_1:
        cafe = ProducenComercializan.objects.filter(
            cultivo__tipo=1,
            opcion=obj[0]).aggregate(a=Coalesce(Sum('cantidad'), V(0)))['a']
        cacao = ProducenComercializan.objects.filter(
            cultivo__tipo=2,
            opcion=obj[0]).aggregate(a=Coalesce(Sum('cantidad'), V(0)))['a']
        hortaliza = ProducenComercializan.objects.filter(
            cultivo__tipo=3,
            opcion=obj[0]).aggregate(a=Coalesce(Sum('cantidad'), V(0)))['a']

        grafo_barra_cultivo[obj[1]] = [cafe, cacao, hortaliza]

    # Average price per crop type, NULL-safe via Coalesce.
    promedio_precio_cafe = ProducenComercializan.objects.filter(
        cultivo__tipo=1, ).aggregate(
            c=Coalesce(Avg('precio_promedio'), V(0)))['c']
    promedio_precio_cacao = ProducenComercializan.objects.filter(
        cultivo__tipo=2, ).aggregate(
            c=Coalesce(Avg('precio_promedio'), V(0)))['c']
    promedio_precio_hortaliza = ProducenComercializan.objects.filter(
        cultivo__tipo=3, ).aggregate(
            c=Coalesce(Avg('precio_promedio'), V(0)))['c']

    lista_precio_promedio = [
        promedio_precio_cafe, promedio_precio_cacao, promedio_precio_hortaliza
    ]

    return render(request, template, locals())
Ejemplo n.º 28
0
def get_unread_message_count(person_pk):
    """
    Return the total number of unread group messages and comments for a person.

    Each accessible message contributes: 1 (if it was never read and was
    created after the person joined the group) plus 1 for every non-deleted
    comment -- not authored by the person -- created after the latest of the
    last reading date, the membership creation date and the message creation
    date.

    :param person_pk: a ``Person`` primary key (or a ``Person`` instance,
        which is accepted and used as-is for the permission check)
    :return: int, the summed unread count over all messages the person is
        allowed to view
    """
    unread_comment_count_subquery = Coalesce(
        Subquery(
            SupportGroupMessageComment.objects.filter(
                deleted=False,
                message_id=OuterRef("id"),
                created__gt=Greatest(
                    OuterRef("last_reading_date"),
                    OuterRef("membership_created"),
                    OuterRef("created"),
                ),
            )
            .exclude(
                author_id=person_pk,
            )
            .values("message_id")
            .annotate(count=Count("pk"))
            .values("count"),
            output_field=IntegerField(),
        ),
        0,
    )

    # Filter messages where person is not allowed (not author, not in required membership)
    messages = SupportGroupMessage.objects.filter(
        deleted=False,
        supportgroup_id__in=SupportGroup.objects.active()
        .filter(memberships__person_id=person_pk)
        .values("id"),
    )
    # The permission check below needs a Person instance, not a pk.
    if not isinstance(person_pk, Person):
        person_pk = Person.objects.get(pk=person_pk)
    messages_allowed_id = [
        msg.id
        for msg in messages
        if person_pk.role.has_perm("msgs.view_supportgroupmessage", msg)
    ]

    unread_message_count = (
        SupportGroupMessage.objects.filter(pk__in=messages_allowed_id)
        .annotate(
            membership_created=Subquery(
                Membership.objects.filter(
                    supportgroup_id=OuterRef("supportgroup_id"),
                    person_id=person_pk,
                ).values("created")[:1]
            )
        )
        .annotate(
            last_reading_date=Subquery(
                SupportGroupMessageRecipient.objects.filter(
                    recipient_id=person_pk, message_id=OuterRef("id")
                ).values("modified")[:1]
            )
        )
        .annotate(unread_comment_count=unread_comment_count_subquery)
        .annotate(
            unread_count=Case(
                # If the message is unread and has been created after the person has joined the group,
                # count 1 for the message plus 1 for each of the message's comments
                When(
                    last_reading_date=None,
                    created__gt=F("membership_created"),
                    then=F("unread_comment_count") + 1,
                ),
                # If the message has already been read once, count 1 for each comment
                # created after the last reading date and after the person has joined
                # the group
                default=F("unread_comment_count"),
            ),
        )
        .values_list("unread_count", flat=True)
    )

    return sum(unread_message_count)
Ejemplo n.º 29
0
def _get_quantity_allocated(stocks: StockQuerySet) -> int:
    """Total quantity allocated across *stocks*; 0 when nothing is allocated
    (Coalesce maps the empty-aggregate NULL to 0)."""
    result = stocks.aggregate(quantity_allocated=Coalesce(
        Sum("allocations__quantity_allocated"), 0))
    return result["quantity_allocated"]
Ejemplo n.º 30
0
 def aggregate_pending_suggestions(self):
     """Aggregation spec: NULL-safe sum of ``pending_suggestions``."""
     return {
         "pending_suggestions": Coalesce(Sum("pending_suggestions"), 0),
     }