Example #1
def polymorphic_prefetch_related(self, *lookups):
    clone = self._clone()
    for lookup in lookups:
        if not isinstance(lookup, Prefetch):
            lookup = Prefetch(lookup)
        if lookup.queryset is None:
            rel = self.model._meta.get_field(lookup.prefetch_through)
            lookup.queryset = rel.related_model.objects.all()
        lookup.queryset = lookup.queryset.polymorphic()
        clone = clone.prefetch_related(lookup)
    return clone
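
A minimal usage sketch, assuming this method lives on a custom queryset whose manager is exposed on the model; Project and its task_set relation are hypothetical names, not from the source:

# Hypothetical: Project has a reverse relation 'task_set' to a
# polymorphic Task model.
projects = Project.objects.polymorphic_prefetch_related('task_set')
for project in projects:
    for task in project.task_set.all():  # served from the prefetch cache
        print(task)  # each task arrives as its concrete subclass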
Example #2
class RunApiViewSet(mixins.ListModelMixin, mixins.CreateModelMixin,
                    mixins.RetrieveModelMixin, mixins.UpdateModelMixin,
                    GenericViewSet):
    queryset = Run.objects.prefetch_related(
        Prefetch('port_set', queryset=Port.objects.select_related(
            'run'))).order_by('-created_date').all()

    def get_serializer_class(self):
        if self.action == 'list':
            return RunApiListSerializer
        else:
            return RunSerializerFull

    def query_from_dict(self, query_filter, queryset, input_list):
        for single_input in input_list:
            key, val = single_input.split(':')
            query = {query_filter % key: val}
            queryset = queryset.filter(**query).all()
        return queryset

    @swagger_auto_schema(query_serializer=RunApiListSerializer)
    def list(self, request, *args, **kwargs):
        query_list_types = [
            'job_groups', 'request_ids', 'inputs', 'tags', 'jira_ids',
            'run_ids', 'apps', 'run', 'values_run', 'ports'
        ]
        fixed_query_params = fix_query_list(request.query_params,
                                            query_list_types)
        serializer = RunApiListSerializer(data=fixed_query_params)
        if serializer.is_valid():
            queryset = time_filter(Run, fixed_query_params)
            queryset = time_filter(Run,
                                   request.query_params,
                                   time_modal='modified_date',
                                   previous_queryset=queryset)
            job_groups = fixed_query_params.get('job_groups')
            jira_ids = fixed_query_params.get('jira_ids')
            run_ids = fixed_query_params.get('run_ids')
            status_param = fixed_query_params.get('status')
            ports = fixed_query_params.get('ports')
            tags = fixed_query_params.get('tags')
            request_ids = fixed_query_params.get('request_ids')
            apps = fixed_query_params.get('apps')
            values_run = fixed_query_params.get('values_run')
            run = fixed_query_params.get('run')
            run_distribution = fixed_query_params.get('run_distribution')
            count = fixed_query_params.get('count')
            full = fixed_query_params.get('full')
            if full:
                full = bool(strtobool(full))
            if job_groups:
                queryset = queryset.filter(job_group__in=job_groups)
            if jira_ids:
                queryset = queryset.filter(job_group__jira_id__in=jira_ids)
            if run_ids:
                queryset = queryset.filter(id__in=run_ids)
            if status_param:
                queryset = queryset.filter(
                    status=RunStatus[status_param].value)
            if ports:
                queryset = self.query_from_dict("port__%s__exact", queryset,
                                                ports)
            if tags:
                queryset = self.query_from_dict("tags__%s__exact", queryset,
                                                tags)
            if request_ids:
                queryset = queryset.filter(tags__requestId__in=request_ids)
            if apps:
                queryset = queryset.filter(app__in=apps)
            if run:
                filter_query = dict()
                for single_run in run:
                    key, value = single_run.split(':')
                    if value == 'True' or value == 'true':
                        value = True
                    if value == 'False' or value == 'false':
                        value = False
                    filter_query[key] = value
                if filter_query:
                    queryset = queryset.filter(**filter_query)
            if values_run:
                if len(values_run) == 1:
                    ret_str = values_run[0]
                    queryset = queryset.values_list(
                        ret_str, flat=True).order_by(ret_str).distinct(ret_str)
                else:
                    values_run_query_set = set(values_run)
                    queryset = queryset.values_list(
                        *values_run_query_set).distinct()
            if run_distribution:
                distribution_dict = {}
                run_query = run_distribution
                run_ids = queryset.values_list('id', flat=True)
                queryset = Run.objects.all()
                queryset = queryset.filter(
                    id__in=run_ids).values(run_query).order_by().annotate(
                        Count(run_query))
                for single_arg in queryset:
                    single_arg_name = None
                    single_arg_count = 0
                    for single_key, single_value in single_arg.items():
                        if 'count' in single_key:
                            single_arg_count = single_value
                        else:
                            single_arg_name = single_value
                    if single_arg_name is not None:
                        distribution_dict[single_arg_name] = single_arg_count
                return Response(distribution_dict, status=status.HTTP_200_OK)
            if count:
                count = bool(strtobool(count))
                if count:
                    return Response(queryset.count(),
                                    status=status.HTTP_200_OK)
            try:
                page = self.paginate_queryset(queryset.all())
            except ValidationError as e:
                return Response(e, status=status.HTTP_400_BAD_REQUEST)
            if page is not None:
                if values_run:
                    return self.get_paginated_response(page)
                if full:
                    serializer = RunSerializerFull(page, many=True)
                else:
                    serializer = RunSerializerPartial(page, many=True)
                return self.get_paginated_response(serializer.data)
            return Response([], status=status.HTTP_200_OK)
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

    def create(self, request, *args, **kwargs):
        serializer = APIRunCreateSerializer(data=request.data,
                                            context={'request': request})
        if serializer.is_valid():
            run = serializer.save()
            response = RunSerializerFull(run)
            create_run_task.delay(response.data['id'], request.data['inputs'])
            job_group_notifier_id = str(run.job_group_notifier_id)
            self._send_notifications(job_group_notifier_id, run)
            return Response(response.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def _send_notifications(self, job_group_notifier_id, run):
        pipeline_name = run.app.name
        pipeline_version = run.app.version
        pipeline_link = run.app.pipeline_link

        pipeline_description_event = AddPipelineToDescriptionEvent(
            job_group_notifier_id, pipeline_name, pipeline_version,
            pipeline_link).to_dict()
        send_notification.delay(pipeline_description_event)

        run_event = RunStartedEvent(job_group_notifier_id, str(run.id),
                                    run.app.name, run.app.pipeline_link,
                                    run.output_directory, run.tags).to_dict()
        send_notification.delay(run_event)
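
A self-contained reproduction of how query_from_dict turns "key:value" entries into keyword filters (it prints the dicts instead of filtering; the values are illustrative):

query_filter = "port__%s__exact"
input_list = ["name:bam", "schema:file"]
for single_input in input_list:
    key, val = single_input.split(':')
    print({query_filter % key: val})
# {'port__name__exact': 'bam'}
# {'port__schema__exact': 'file'}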
Example #3
def find_root_by_name(text_label_name):
    return TextLabel.objects.prefetch_related(
        Prefetch('translatedtextlabel_set',
                 to_attr="translated_text_labels")).get(label=text_label_name,
                                                        parent__isnull=True)
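
A usage sketch; the label value is illustrative only:

root = find_root_by_name('welcome')  # hypothetical label
for translated in root.translated_text_labels:  # filled by to_attr, no extra queries
    print(translated)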
Example #4
    def generateCustomSiteReport(self, pk, base_url, fs_ids, startdate,
                                 enddate, removeNullField):
        self.base_url = base_url
        self.removeNullField = removeNullField

        # Our container for 'Flowable' objects
        elements = []
        toc = TableOfContents()
        toc.levelStyles = [
            PS(fontName='arialuni',
               fontSize=12,
               name='TOCHeading1',
               leftIndent=20,
               firstLineIndent=-20,
               spaceBefore=5,
               leading=10),
            PS(fontName='arialuni',
               fontSize=10,
               name='TOCHeading2',
               leftIndent=40,
               firstLineIndent=-20,
               spaceBefore=3,
               leading=10),
            PS(fontName='arialuni',
               fontSize=9,
               name='TOCHeading3',
               leftIndent=40,
               firstLineIndent=-20,
               spaceBefore=3,
               leading=10),
        ]
        elements.append(
            Paragraph('Custom Responses Report for Site', self.centered))
        elements.append(PageBreak())
        elements.append(Paragraph('Table of contents', self.centered))
        elements.append(toc)
        elements.append(PageBreak())

        # A large collection of style sheets pre-made for us
        styles = getSampleStyleSheet()
        styles.add(ParagraphStyle(name='centered', alignment=TA_CENTER))
        site = Site.objects.select_related('project').get(pk=pk)
        self.project_name = site.project.name
        self.project_logo = site.project.logo.url

        elements.append(Paragraph(site.name, self.h1))
        elements.append(Paragraph(site.identifier, styles['Normal']))
        if site.address:
            elements.append(Paragraph(site.address, styles['Normal']))
        if site.phone:
            elements.append(Paragraph(site.phone, styles['Normal']))
        if site.region:
            elements.append(Paragraph(site.region.name, styles['Normal']))

        elements.append(Spacer(0, 10))

        elements.append(Paragraph("Site Information", styles['Normal']))
        metas = generateSiteMetaAttribs(pk)

        styBackground = ParagraphStyle('background',
                                       parent=self.bodystyle,
                                       backColor=colors.white)

        meta_data = []
        if metas:
            for meta in metas:
                row = [
                    Paragraph(meta['question_text'], styBackground),
                    Paragraph(
                        str(meta['answer']) if isinstance(meta['answer'], int)
                        else meta['answer'], styBackground)
                ]
                meta_data.append(row)

            metat1 = Table(meta_data, colWidths=(60 * mm, None))
            metat1.setStyle(self.ts1)
            elements.append(metat1)

        elements.append(PageBreak())
        elements.append(Paragraph('Responses', self.h2))

        split_startdate = startdate.split('-')
        split_enddate = enddate.split('-')

        new_startdate = date(int(split_startdate[0]), int(split_startdate[1]),
                             int(split_startdate[2]))
        end = date(int(split_enddate[0]), int(split_enddate[1]),
                   int(split_enddate[2]))

        new_enddate = end + datetime.timedelta(days=1)

        forms = FieldSightXF.objects.select_related('xf').filter(
            pk__in=fs_ids, is_survey=False, is_deleted=False).order_by(
                'project', 'is_staged', 'is_scheduled', 'stage__stage__order',
                'stage__order', 'stage__date_created',
                'date_created').prefetch_related(
                    Prefetch(
                        'site_form_instances',
                        queryset=FInstance.objects.select_related('instance').
                        filter(date__range=[new_startdate, new_enddate])),
                    Prefetch('project_form_instances',
                             queryset=FInstance.objects.select_related(
                                 'instance').filter(
                                     site_id=site.id,
                                     date__range=[new_startdate,
                                                  new_enddate])))

        if not forms:
            elements.append(
                Paragraph("No Any Responses Yet.", styles['Heading5']))

        styNormal = styles['Normal']
        styBackground = ParagraphStyle('background',
                                       parent=styNormal,
                                       backColor=colors.white)

        for form in forms:
            elements.append(Spacer(0, 10))
            elements.append(Paragraph(form.xf.title, self.h3))
            elements.append(
                Paragraph(form.form_type() + " Form", styles['Heading4']))
            if form.stage:
                if form.stage.stage:
                    elements.append(
                        Paragraph("Stage Id: " + str(form.stage.stage.order),
                                  self.paragraphstyle))
                    elements.append(
                        Paragraph("Sub Stage Id: " + str(form.stage.order),
                                  self.paragraphstyle))
                else:
                    elements.append(
                        Paragraph("Stage Id: " + str(form.stage.order),
                                  self.paragraphstyle))

            json_question = form.xf.json
            form_user_name = form.xf.user.username
            self.media_folder = form_user_name

            sub_count = 0
            if not form.from_project and form.site_form_instances.all():
                for instance in form.site_form_instances.all():
                    self.instance_id = instance.instance_id
                    new_elements = self.append_answers(json_question, instance,
                                                       sub_count)
                    elements += new_elements

            elif form.project_form_instances.all():
                for instance in form.project_form_instances.all():
                    self.instance_id = instance.instance_id
                    new_elements = self.append_answers(json_question, instance,
                                                       sub_count)
                    elements += new_elements

            else:
                elements.append(
                    Paragraph("No Submisions Yet. ", styles['Heading5']))
                elements.append(Spacer(0, 10))
        self.doc.multiBuild(elements, onLaterPages=self._header_footer)
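
A side note on the end-date arithmetic above: if FInstance.date is a DateTimeField, date__range bounds it between midnights, so one day is added to keep the requested end date inclusive. A minimal illustration:

import datetime
from datetime import date

end = date(2021, 5, 31)  # user-supplied end date
new_enddate = end + datetime.timedelta(days=1)
# date__range=[new_startdate, new_enddate] now matches submissions made
# at any time on 2021-05-31.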
Example #5
def get_grouped_items(event, subevent=None, voucher=None, channel='web'):
    items = event.items.filter_available(
        channel=channel, voucher=voucher).select_related(
            'category',
            'tax_rule',  # for re-grouping
        ).prefetch_related(
            Prefetch('quotas',
                     to_attr='_subevent_quotas',
                     queryset=event.quotas.filter(subevent=subevent)),
            Prefetch(
                'bundles',
                queryset=ItemBundle.objects.prefetch_related(
                    Prefetch('bundled_item',
                             queryset=event.items.select_related(
                                 'tax_rule').prefetch_related(
                                     Prefetch('quotas',
                                              to_attr='_subevent_quotas',
                                              queryset=event.quotas.filter(
                                                  subevent=subevent)), )),
                    Prefetch('bundled_variation',
                             queryset=ItemVariation.objects.select_related(
                                 'item', 'item__tax_rule').filter(
                                     item__event=event).prefetch_related(
                                         Prefetch('quotas',
                                                  to_attr='_subevent_quotas',
                                                  queryset=event.quotas.filter(
                                                      subevent=subevent)), )),
                )),
            Prefetch('variations',
                     to_attr='available_variations',
                     queryset=ItemVariation.objects.filter(
                         active=True, quotas__isnull=False).prefetch_related(
                             Prefetch('quotas',
                                      to_attr='_subevent_quotas',
                                      queryset=event.quotas.filter(
                                          subevent=subevent))).distinct()),
        ).annotate(quotac=Count('quotas'),
                   has_variations=Count('variations')).filter(
                       quotac__gt=0).order_by('category__position',
                                              'category_id', 'position',
                                              'name')
    display_add_to_cart = False
    external_quota_cache = event.cache.get('item_quota_cache')
    quota_cache = external_quota_cache or {}

    if subevent:
        item_price_override = subevent.item_price_overrides
        var_price_override = subevent.var_price_overrides
    else:
        item_price_override = {}
        var_price_override = {}

    restrict_vars = set()
    if voucher and voucher.quota_id:
        # If a voucher is set to a specific quota, we need to filter out on that level
        restrict_vars = set(voucher.quota.variations.all())

    for item in items:
        if voucher and voucher.item_id and voucher.variation_id:
            # Restrict variations if the voucher only allows one
            item.available_variations = [
                v for v in item.available_variations
                if v.pk == voucher.variation_id
            ]

        max_per_order = item.max_per_order or int(
            event.settings.max_items_per_order)

        if not item.has_variations:
            item._remove = not bool(item._subevent_quotas)

            if voucher and (voucher.allow_ignore_quota or voucher.block_quota):
                item.cached_availability = (Quota.AVAILABILITY_OK,
                                            voucher.max_usages -
                                            voucher.redeemed)
            else:
                item.cached_availability = list(
                    item.check_quotas(subevent=subevent,
                                      _cache=quota_cache,
                                      include_bundled=True))

            item.order_max = min(
                item.cached_availability[1] if item.cached_availability[1]
                is not None else sys.maxsize, max_per_order)

            original_price = item_price_override.get(item.pk,
                                                     item.default_price)
            if voucher:
                price = voucher.calculate_price(original_price)
            else:
                price = original_price

            item.display_price = item.tax(price,
                                          currency=event.currency,
                                          include_bundled=True)

            if price != original_price:
                item.original_price = original_price

            display_add_to_cart = display_add_to_cart or item.order_max > 0
        else:
            for var in item.available_variations:
                if voucher and (voucher.allow_ignore_quota
                                or voucher.block_quota):
                    var.cached_availability = (Quota.AVAILABILITY_OK,
                                               voucher.max_usages -
                                               voucher.redeemed)
                else:
                    var.cached_availability = list(
                        var.check_quotas(subevent=subevent,
                                         _cache=quota_cache,
                                         include_bundled=True))

                var.order_max = min(
                    var.cached_availability[1] if var.cached_availability[1]
                    is not None else sys.maxsize, max_per_order)

                original_price = var_price_override.get(var.pk, var.price)
                if voucher:
                    price = voucher.calculate_price(original_price)
                else:
                    price = original_price

                var.display_price = var.tax(price,
                                            currency=event.currency,
                                            include_bundled=True)

                if price != original_price:
                    var.original_price = original_price

                display_add_to_cart = display_add_to_cart or var.order_max > 0

            item.available_variations = [
                v for v in item.available_variations
                if v._subevent_quotas and (
                    not voucher or not voucher.quota_id or v in restrict_vars)
            ]

            if voucher and voucher.variation_id:
                item.available_variations = [
                    v for v in item.available_variations
                    if v.pk == voucher.variation_id
                ]

            if len(item.available_variations) > 0:
                item.min_price = min([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])
                item.max_price = max([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])

            item._remove = not bool(item.available_variations)

    if not external_quota_cache:
        event.cache.set('item_quota_cache', quota_cache, 5)
    items = [
        item for item in items
        if (len(item.available_variations) > 0 or not item.has_variations)
        and not item._remove
    ]
    return items, display_add_to_cart
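
A hedged consumption sketch based on the signature and return value above (event comes from the caller):

# items is a plain Python list with computed attributes attached, not a
# queryset; display_add_to_cart says whether anything is orderable.
items, display_add_to_cart = get_grouped_items(event, subevent=None,
                                               voucher=None, channel='web')
for item in items:
    print(item.name, item.order_max, item.display_price)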
Example #6
class RunApiViewSet(mixins.ListModelMixin, mixins.CreateModelMixin,
                    mixins.RetrieveModelMixin, mixins.UpdateModelMixin,
                    GenericViewSet):
    queryset = (Run.objects.prefetch_related(
        Prefetch("port_set", queryset=Port.objects.select_related(
            "run"))).order_by("-created_date").all())

    def get_serializer_class(self):
        if self.action == "list":
            return RunApiListSerializer
        else:
            return RunSerializerFull

    @swagger_auto_schema(query_serializer=RunApiListSerializer)
    def list(self, request, *args, **kwargs):
        query_list_types = [
            "job_groups",
            "request_ids",
            "inputs",
            "tags",
            "jira_ids",
            "run_ids",
            "apps",
            "run",
            "values_run",
            "ports",
        ]
        fixed_query_params = fix_query_list(request.query_params,
                                            query_list_types)
        serializer = RunApiListSerializer(data=fixed_query_params)
        if serializer.is_valid():
            queryset = time_filter(Run, fixed_query_params)
            queryset = time_filter(Run,
                                   request.query_params,
                                   time_modal="modified_date",
                                   previous_queryset=queryset)
            job_groups = fixed_query_params.get("job_groups")
            jira_ids = fixed_query_params.get("jira_ids")
            run_ids = fixed_query_params.get("run_ids")
            status_param = fixed_query_params.get("status")
            ports = fixed_query_params.get("ports")
            tags = fixed_query_params.get("tags")
            operator_run = fixed_query_params.get("operator_run")
            request_ids = fixed_query_params.get("request_ids")
            apps = fixed_query_params.get("apps")
            values_run = fixed_query_params.get("values_run")
            run = fixed_query_params.get("run")
            run_distribution = fixed_query_params.get("run_distribution")
            count = fixed_query_params.get("count")
            full = fixed_query_params.get("full")
            if full:
                full = bool(strtobool(full))
            if operator_run:
                queryset = queryset.filter(operator_run_id=operator_run)
            if job_groups:
                queryset = queryset.filter(job_group__in=job_groups)
            if jira_ids:
                queryset = queryset.filter(job_group__jira_id__in=jira_ids)
            if run_ids:
                queryset = queryset.filter(id__in=run_ids)
            if status_param:
                queryset = queryset.filter(
                    status=RunStatus[status_param].value)
            if ports:
                queryset = query_from_dict("port__%s__exact", queryset, ports)
            if tags:
                queryset = query_from_dict("tags__%s__contains", queryset,
                                           tags)
            if request_ids:
                queryset = queryset.filter(tags__requestId__in=request_ids)
            if apps:
                queryset = queryset.filter(app__in=apps)
            if run:
                filter_query = dict()
                for single_run in run:
                    key, value = single_run.split(":")
                    if value == "True" or value == "true":
                        value = True
                    if value == "False" or value == "false":
                        value = False
                    filter_query[key] = value
                if filter_query:
                    queryset = queryset.filter(**filter_query)
            if values_run:
                if len(values_run) == 1:
                    ret_str = values_run[0]
                    queryset = queryset.values_list(
                        ret_str, flat=True).order_by(ret_str).distinct(ret_str)
                else:
                    sorted_query_list = sorted(set(values_run))
                    queryset = queryset.values_list(
                        *sorted_query_list).distinct()
            if run_distribution:
                distribution_dict = {}
                run_query = run_distribution
                run_ids = queryset.values_list("id", flat=True)
                queryset = Run.objects.all()
                queryset = queryset.filter(
                    id__in=run_ids).values(run_query).order_by().annotate(
                        Count(run_query))
                for single_arg in queryset:
                    single_arg_name = None
                    single_arg_count = 0
                    for single_key, single_value in single_arg.items():
                        if "count" in single_key:
                            single_arg_count = single_value
                        else:
                            single_arg_name = single_value
                    if single_arg_name is not None:
                        distribution_dict[single_arg_name] = single_arg_count
                return Response(distribution_dict, status=status.HTTP_200_OK)
            if count:
                count = bool(strtobool(count))
                if count:
                    return Response(queryset.count(),
                                    status=status.HTTP_200_OK)
            try:
                page = self.paginate_queryset(queryset.all())
            except ValidationError as e:
                return Response(e, status=status.HTTP_400_BAD_REQUEST)
            if page is not None:
                if values_run:
                    return self.get_paginated_response(page)
                if full:
                    serializer = RunSerializerFull(page, many=True)
                else:
                    serializer = RunSerializerPartial(page, many=True)
                return self.get_paginated_response(serializer.data)
            return Response([], status=status.HTTP_200_OK)
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

    def create(self, request, *args, **kwargs):
        run_creator = RunCreator(**request.data)
        if run_creator.is_valid():
            run = run_creator.create()
            response = RunSerializerFull(run)
            create_run_task.delay(response.data["id"], request.data["inputs"])
            job_group_notifier_id = str(run.job_group_notifier_id)
            if job_group_notifier_id:
                self._send_notifications(job_group_notifier_id, run)
            return Response(response.data, status=status.HTTP_201_CREATED)
        return Response("Error", status=status.HTTP_400_BAD_REQUEST)

    @swagger_auto_schema(request_body=AbortRunSerializer)
    @action(detail=False, methods=["post"])
    def abort(self, request, *args, **kwargs):
        serializer = AbortRunSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        job_group_id = request.data.get("job_group_id", None)
        runs = request.data.get("runs", [])
        abort_job_task.delay(job_group_id, runs)
        return Response("Abort task submitted",
                        status=status.HTTP_202_ACCEPTED)

    def _send_notifications(self, job_group_notifier_id, run):
        pipeline_name = run.app.name
        pipeline_version = run.app.version
        pipeline_link = run.app.pipeline_link

        pipeline_description_event = AddPipelineToDescriptionEvent(
            job_group_notifier_id, pipeline_name, pipeline_version,
            pipeline_link).to_dict()
        send_notification.delay(pipeline_description_event)

        run_event = RunStartedEvent(job_group_notifier_id, str(run.id),
                                    run.app.name, run.app.pipeline_link,
                                    run.output_directory, run.tags).to_dict()
        send_notification.delay(run_event)
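
One behavioral difference from Example #2: here the values_run columns are sorted before being passed to values_list, because set iteration order is arbitrary across processes. A small illustration:

# Without sorted(), the column order of values_list(*cols) could change
# between runs; sorting pins it down.
cols = {"status", "app", "id"}
print(sorted(cols))  # ['app', 'id', 'status'] -- deterministic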
Example #7
def listar_modulos_com_aulas():
    aulas_ordenadas = Aula.objects.order_by('order')
    return Modulo.objects.order_by('order').prefetch_related(
        Prefetch('aula_set', queryset=aulas_ordenadas, to_attr='aulas')).all()
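
Usage sketch: thanks to the to_attr prefetch, iterating the aulas of each modulo costs no extra queries:

for modulo in listar_modulos_com_aulas():
    print(modulo)
    for aula in modulo.aulas:  # plain list, already ordered by 'order'
        print(' ', aula)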
Example #8
def with_orders(self):
    qs = (Order.objects.with_total_value().with_payment().select_related(
        'user').all())
    return self.prefetch_related(
        Prefetch('order_set', queryset=qs, to_attr='orders'))
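
A hedged usage sketch, assuming this method is defined on a custom queryset exposed through the manager of a model with a reverse order_set relation; Client is a hypothetical name:

# Hypothetical: Client.objects is built from the queryset defining
# with_orders(), e.g. via Manager.from_queryset().
for client in Client.objects.with_orders():
    for order in client.orders:  # to_attr list from the prefetch
        print(client, order)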
Example #9
class CourseViewSet(NonDestroyableModelViewSet):
    product_attribute_value_prefetch = Prefetch(
        'products__attribute_values',
        queryset=ProductAttributeValue.objects.select_related(
            'attribute').all())
    products_prefetch = Prefetch(
        'products',
        queryset=Product.objects.select_related('parent__product_class').all())
    lookup_value_regex = COURSE_ID_REGEX
    serializer_class = serializers.CourseSerializer
    permission_classes = (
        IsAuthenticated,
        IsAdminUser,
    )

    def get_queryset(self):
        return Course.objects.filter(site=self.request.site).prefetch_related(
            self.products_prefetch, self.product_attribute_value_prefetch,
            'products__stockrecords')

    def list(self, request, *args, **kwargs):
        """
        List all courses.
        ---
        parameters:
            - name: include_products
              description: Indicates if the related products should be included in the response.
              required: false
              type: boolean
              paramType: query
              multiple: false
        """
        return super(CourseViewSet, self).list(request, *args, **kwargs)

    def create(self, request, *args, **kwargs):
        course = Course.objects.create(id=request.data['id'],
                                       name=request.data['name'],
                                       site=request.site)
        data = serializers.CourseSerializer(course,
                                            context={
                                                'request': request
                                            }).data
        return Response(data, status=status.HTTP_201_CREATED)

    def retrieve(self, request, *args, **kwargs):
        """
        Retrieve details for a course.
        ---
        parameters:
            - name: include_products
              description: Indicates if the related products should be included in the response.
              required: false
              type: boolean
              paramType: query
              multiple: false
        """
        return super(CourseViewSet, self).retrieve(request, *args, **kwargs)

    def get_serializer_context(self):
        context = super(CourseViewSet, self).get_serializer_context()
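        # Note: bool() of any non-empty string is True, so even
        # ?include_products=false sets include_products to True here.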
        context['include_products'] = bool(
            self.request.GET.get('include_products', False))
        return context

    @detail_route(methods=['post'])
    def publish(self, request, pk=None):  # pylint: disable=unused-argument
        """ Publish the course to LMS. """
        course = self.get_object()
        published = False
        msg = 'Course [{course_id}] was not published to LMS ' \
              'because the switch [publish_course_modes_to_lms] is disabled.'

        if waffle.switch_is_active('publish_course_modes_to_lms'):
            published = course.publish_to_lms()
            if published:
                msg = 'Course [{course_id}] was successfully published to LMS.'
            else:
                msg = 'An error occurred while publishing [{course_id}] to LMS.'

        return Response({'status': msg.format(course_id=course.id)},
                        status=status.HTTP_200_OK if published else
                        status.HTTP_500_INTERNAL_SERVER_ERROR)
Example #10
    def get_data_for_es(cls, index, action):
        """
        Load all the course runs from the Course model and format them for the ElasticSearch index
        """
        for course in (Course.objects.filter(
                extended_object__publisher_is_draft=False,
                extended_object__title_set__published=True,
        ).prefetch_related(
                Prefetch(
                    "extended_object__title_set",
                    to_attr="published_titles",
                    queryset=Title.objects.filter(published=True),
                )).distinct()):
            # Prepare published titles
            titles = {
                t.language: t.title
                for t in course.extended_object.published_titles
            }

            # Prepare cover images
            cover_images = {}
            for cover_image in Picture.objects.filter(
                    cmsplugin_ptr__placeholder__page=course.extended_object,
                    cmsplugin_ptr__placeholder__slot="course_cover",
            ):
                # Force the image format before computing it
                cover_image.use_no_cropping = False
                cover_image.width = COURSES_COVER_IMAGE_WIDTH
                cover_image.height = COURSES_COVER_IMAGE_HEIGHT
                cover_images[
                    cover_image.cmsplugin_ptr.language] = cover_image.img_src

            # Prepare syllabus texts
            syllabus_texts = defaultdict(list)
            for simple_text in SimpleText.objects.filter(
                    cmsplugin_ptr__placeholder__page=course.extended_object,
                    cmsplugin_ptr__placeholder__slot="course_syllabus",
            ):
                syllabus_texts[simple_text.cmsplugin_ptr.language].append(
                    simple_text.body)

            # Make sure we get title information for categories in the same request
            category_pages = (
                course.get_root_to_leaf_category_pages().prefetch_related(
                    Prefetch(
                        "title_set",
                        to_attr="published_titles",
                        queryset=Title.objects.filter(published=True),
                    )).only("pk"))

            # Make sure we get title information for organizations in the same request
            organizations = (course.get_organizations().prefetch_related(
                Prefetch(
                    "extended_object__title_set",
                    to_attr="published_titles",
                    queryset=Title.objects.filter(published=True),
                )).only("extended_object").distinct())

            course_runs = course.get_course_runs()
            for course_run in course_runs:
                yield {
                    "_id":
                    str(course_run.extended_object_id),
                    "_index":
                    index,
                    "_op_type":
                    action,
                    "_type":
                    cls.document_type,
                    "start":
                    course_run.start,
                    "end":
                    course_run.end,
                    "enrollment_start":
                    course_run.enrollment_start,
                    "enrollment_end":
                    course_run.enrollment_end,
                    "absolute_url": {
                        language:
                        course_run.extended_object.get_absolute_url(language)
                        for language in titles.keys()
                    },
                    "categories": [str(page.pk) for page in category_pages],
                    # Index the names of categories to surface them in full text searches
                    "categories_names":
                    reduce(
                        lambda acc, title: {
                            **acc,
                            title.language:
                            acc[title.language] + [title.title]
                            if acc.get(title.language) else [title.title],
                        },
                        [
                            title for page in category_pages
                            for title in page.published_titles
                        ],
                        {},
                    ),
                    "complete": {
                        language: slice_string_for_completion(title)
                        for language, title in titles.items()
                    },
                    "cover_image":
                    cover_images,
                    "description":
                    {l: " ".join(st)
                     for l, st in syllabus_texts.items()},
                    "is_new":
                    len(course_runs) == 1,
                    "languages":
                    course_run.languages,
                    "organizations": [
                        str(id)
                        for id in course.get_organizations().values_list(
                            "public_extension__extended_object", flat=True)
                        if id is not None
                    ],
                    # Index the names of organizations to surface them in full text searches
                    "organizations_names":
                    reduce(
                        lambda acc, title: {
                            **acc,
                            title.language:
                            acc[title.language] + [title.title]
                            if acc.get(title.language) else [title.title],
                        },
                        [
                            title for organization in organizations for title
                            in organization.extended_object.published_titles
                        ],
                        {},
                    ),
                    "title":
                    titles,
                }
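
The reduce() calls that build categories_names and organizations_names are dense; an equivalent imperative sketch of the same accumulation:

# Build {language: [title, ...]} from a flat list of Title objects,
# mirroring reduce(lambda acc, title: {...}, titles, {}) above.
names = {}
for title in flat_titles:  # e.g. every published title of the category pages
    names.setdefault(title.language, []).append(title.title)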
Example #11
def pokemonFullItemContext(context):
    request = context['request']
    context['stats'] = [
        OrderedDict([
            ('hit_points', {
                'value':
                context['item'].hit_points,
                'name':
                _('Hit Points'),
                'percent':
                (context['item'].hit_points /
                 django_settings.POKEMONS_MAX_STATS['hit_points']) * 100,
            }),
            ('attack', {
                'value':
                context['item'].attack,
                'name':
                _('Attack'),
                'percent':
                (context['item'].attack /
                 django_settings.POKEMONS_MAX_STATS['attack']) * 100,
            }),
            ('defense', {
                'value':
                context['item'].defense,
                'name':
                _('Defense'),
                'percent':
                (context['item'].defense /
                 django_settings.POKEMONS_MAX_STATS['defense']) * 100,
            }),
        ]),
        OrderedDict([
            ('max_cp', {
                'value':
                context['item'].max_cp,
                'name':
                _('Max CP'),
                'percent':
                (context['item'].max_cp /
                 django_settings.POKEMONS_MAX_STATS['max_cp']) * 100,
            }),
            ('catch_rate', {
                'value':
                context['item'].catch_rate,
                'name':
                _('Catch Rate'),
                'percent':
                (context['item'].catch_rate /
                 django_settings.POKEMONS_MAX_STATS['catch_rate']) * 100,
                'suffix':
                '%',
            }),
            ('flee_rate', {
                'value':
                context['item'].flee_rate,
                'name':
                _('Flee Rate'),
                'percent':
                (context['item'].flee_rate /
                 django_settings.POKEMONS_MAX_STATS['flee_rate']) * 100,
                'suffix':
                '%',
            }),
        ]),
    ]
    context['tab'] = 'collection'
    if 'tab' in request.GET and request.GET['tab'] in _full_pokemon_tabs:
        context['tab'] = request.GET['tab']
    if context['tab'] == 'collection':
        if request.user.is_authenticated():
            request.user.all_accounts = request.user.accounts.all(
            ).prefetch_related(
                Prefetch('pokedex',
                         queryset=models.Pokedex.objects.filter(
                             pokemon_id=context['item'].id),
                         to_attr='in_pokedex'),
                Prefetch('pokemons',
                         queryset=models.OwnedPokemon.objects.filter(
                             pokemon_id=context['item'].id).order_by('-cp'),
                         to_attr='all_pokemons'),
            )
            for account in request.user.all_accounts:
                if len(account.in_pokedex):
                    account.in_pokedex = account.in_pokedex[0]
                else:
                    account.in_pokedex = models.Pokedex.objects.create(
                        account=account, pokemon=context['item'])
                for op in account.all_pokemons:
                    op.progress_percent = (op.cp / op.max_cp) * 100 if op.cp else 0
    elif context['tab'] == 'evolutions':
        pass
    elif context['tab'] == 'attacks':
        context['item'].all_attacks = context['item'].attacks.all().order_by(
            'is_special')
Example #12
    def actions(self) -> List:
        last_updated_action_ts = Action.objects.filter(team_id=self.team_id).aggregate(models.Max("updated_at"))[
            "updated_at__max"
        ]

        actions = (
            Action.objects.filter(
                team_id=self.team_id, steps__event=self.event, deleted=False,  # filter by event name to narrow down
            )
            .distinct("id")
            .prefetch_related(Prefetch("steps", queryset=ActionStep.objects.order_by("id")))
        )
        if not self._can_use_cached_query(last_updated_action_ts):
            TEAM_ACTION_QUERY_CACHE[self.team_id], _ = actions.query.sql_with_params()
            if len(actions) == 0:
                return []
            events: models.QuerySet[Any] = Event.objects.filter(pk=self.pk)
            for action in actions:
                events = events.annotate(
                    **{
                        f"action_{action.pk}": Event.objects.filter(pk=self.pk)
                        .query_db_by_action(action)
                        .values("id")[:1]
                    }
                )
            # This block is a little cryptic so bear with me
            # We grab the query and the params from the ORM here
            q, p = events.query.sql_with_params()

            # We then take the parameters and replace the event id's with a placeholder
            # We use this later to sub back in future event id's
            # The rest of the parameters are shared between action types
            qp = tuple(["%s" if i == self.pk else i for i in p])

            # Create a cache item and add it to the cache keyed on team_id and event id
            qcache = {self.event: (q, qp)}
            TEAM_EVENT_ACTION_QUERY_CACHE[self.team_id].update(qcache)

            # Update the last updated team action timestamp for future reference
            LAST_UPDATED_TEAM_ACTION[self.team_id] = last_updated_action_ts
        else:

            # If we have reached this block we are about to use the sql query cache
            # Grab the actions using the cached action query
            actions.raw(TEAM_ACTION_QUERY_CACHE[self.team_id])

            # Grab the cached query and query params, we will need to replace some params
            q, p = TEAM_EVENT_ACTION_QUERY_CACHE[self.team_id][self.event]

            # Replace the query param placeholders with the event id (opposite of what we did above)
            qp = tuple([self.pk if i == "%s" else i for i in p])

            with connection.cursor() as cursor:
                # Format and execute the cached query using the mostly cached params
                qstring = cursor.mogrify(q, qp)
                cursor.execute(qstring)
                events = namedtuplefetchall(cursor)

        event = [event for event in events][0]
        filtered_actions = [action for action in actions if getattr(event, f"action_{action.pk}", None)]
        return filtered_actions
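
namedtuplefetchall is not defined in this excerpt; it presumably matches the well-known helper from the Django documentation:

from collections import namedtuple

def namedtuplefetchall(cursor):
    """Return all rows from a cursor as a list of namedtuples."""
    desc = cursor.description
    nt_result = namedtuple('Result', [col[0] for col in desc])
    return [nt_result(*row) for row in cursor.fetchall()]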
Example #13
def get_grouped_items(event, subevent=None, voucher=None):
    items = event.items.all().filter(
        Q(active=True)
        & Q(Q(available_from__isnull=True) | Q(available_from__lte=now()))
        & Q(Q(available_until__isnull=True) | Q(available_until__gte=now()))
        & ~Q(category__is_addon=True))

    vouchq = Q(hide_without_voucher=False)
    if voucher:
        if voucher.item_id:
            vouchq |= Q(pk=voucher.item_id)
            items = items.filter(pk=voucher.item_id)
        elif voucher.quota_id:
            items = items.filter(quotas__in=[voucher.quota_id])

    items = items.filter(vouchq).select_related(
        'category',
        'tax_rule',  # for re-grouping
    ).prefetch_related(
        Prefetch('quotas',
                 to_attr='_subevent_quotas',
                 queryset=event.quotas.filter(subevent=subevent)),
        Prefetch('variations',
                 to_attr='available_variations',
                 queryset=ItemVariation.objects.filter(
                     active=True, quotas__isnull=False).prefetch_related(
                         Prefetch('quotas',
                                  to_attr='_subevent_quotas',
                                  queryset=event.quotas.filter(
                                      subevent=subevent))).distinct()),
    ).annotate(quotac=Count('quotas'),
               has_variations=Count('variations')).filter(
                   quotac__gt=0).order_by('category__position', 'category_id',
                                          'position', 'name')
    display_add_to_cart = False
    external_quota_cache = event.cache.get('item_quota_cache')
    quota_cache = external_quota_cache or {}

    if subevent:
        item_price_override = subevent.item_price_overrides
        var_price_override = subevent.var_price_overrides
    else:
        item_price_override = {}
        var_price_override = {}

    for item in items:
        if voucher and voucher.item_id and voucher.variation_id:
            # Restrict variations if the voucher only allows one
            item.available_variations = [
                v for v in item.available_variations
                if v.pk == voucher.variation_id
            ]

        max_per_order = item.max_per_order or int(
            event.settings.max_items_per_order)

        if not item.has_variations:
            item._remove = not bool(item._subevent_quotas)

            if voucher and (voucher.allow_ignore_quota or voucher.block_quota):
                item.cached_availability = (Quota.AVAILABILITY_OK,
                                            voucher.max_usages -
                                            voucher.redeemed)
            else:
                item.cached_availability = list(
                    item.check_quotas(subevent=subevent, _cache=quota_cache))

            item.order_max = min(
                item.cached_availability[1] if item.cached_availability[1]
                is not None else sys.maxsize, max_per_order)

            price = item_price_override.get(item.pk, item.default_price)
            if voucher:
                price = voucher.calculate_price(price)
            item.display_price = item.tax(price)

            display_add_to_cart = display_add_to_cart or item.order_max > 0
        else:
            for var in item.available_variations:
                if voucher and (voucher.allow_ignore_quota
                                or voucher.block_quota):
                    var.cached_availability = (Quota.AVAILABILITY_OK,
                                               voucher.max_usages -
                                               voucher.redeemed)
                else:
                    var.cached_availability = list(
                        var.check_quotas(subevent=subevent,
                                         _cache=quota_cache))

                var.order_max = min(
                    var.cached_availability[1] if var.cached_availability[1]
                    is not None else sys.maxsize, max_per_order)

                price = var_price_override.get(var.pk, var.price)
                if voucher:
                    price = voucher.calculate_price(price)
                var.display_price = var.tax(price)

                display_add_to_cart = display_add_to_cart or var.order_max > 0

            item.available_variations = [
                v for v in item.available_variations if v._subevent_quotas
            ]
            if voucher and voucher.variation_id:
                item.available_variations = [
                    v for v in item.available_variations
                    if v.pk == voucher.variation_id
                ]

            if len(item.available_variations) > 0:
                item.min_price = min([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])
                item.max_price = max([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])

            item._remove = not bool(item.available_variations)

    if not external_quota_cache:
        event.cache.set('item_quota_cache', quota_cache, 5)
    items = [
        item for item in items
        if (len(item.available_variations) > 0 or not item.has_variations)
        and not item._remove
    ]
    return items, display_add_to_cart
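
This is an older variant of Example #5, where the availability filter was still inlined; Example #5's filter_available() presumably wraps the same logic. A hypothetical sketch of that refactoring (simplified: the hide_without_voucher and channel handling are omitted):

from django.db import models
from django.db.models import Q
from django.utils.timezone import now

class ItemQuerySet(models.QuerySet):
    def filter_available(self, channel='web', voucher=None):
        # Same inline conditions as above: active, within the sales window,
        # and not an add-on category.
        qs = self.filter(
            Q(active=True)
            & Q(Q(available_from__isnull=True) | Q(available_from__lte=now()))
            & Q(Q(available_until__isnull=True) | Q(available_until__gte=now()))
            & ~Q(category__is_addon=True))
        if voucher:
            if voucher.item_id:
                qs = qs.filter(pk=voucher.item_id)
            elif voucher.quota_id:
                qs = qs.filter(quotas__in=[voucher.quota_id])
        return qs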
Example #14
class ChannelViewSet(
        ChannelsMixin,
        MultipleLookupDetailMixin,
        mixins.CreateModelMixin,
        mixins.RetrieveModelMixin,
        mixins.UpdateModelMixin,
        mixins.ListModelMixin,
        mixins.DestroyModelMixin,
        viewsets.GenericViewSet,
):
    url_lookups = [
        {
            "lookup_field": "uuid",
            "validator": serializers.serializers.UUIDField().to_internal_value,
        },
        {
            "lookup_field":
            "username",
            "validator":
            federation_utils.get_actor_data_from_username,
            "get_query":
            lambda v: Q(
                actor__domain=v["domain"],
                actor__preferred_username__iexact=v["username"],
            ),
        },
    ]
    filterset_class = filters.ChannelFilter
    serializer_class = serializers.ChannelSerializer
    queryset = (models.Channel.objects.all().prefetch_related(
        "library",
        "attributed_to",
        "actor",
        Prefetch("artist", queryset=ARTIST_PREFETCH_QS),
    ).order_by("-creation_date"))
    permission_classes = [
        oauth_permissions.ScopePermission,
        permissions.OwnerPermission,
    ]
    required_scope = "libraries"
    anonymous_policy = "setting"
    owner_checks = ["write"]
    owner_field = "attributed_to.user"
    owner_exception = exceptions.PermissionDenied

    def get_serializer_class(self):
        if self.request.method.lower() in ["head", "get", "options"]:
            return serializers.ChannelSerializer
        elif self.action in ["update", "partial_update"]:
            return serializers.ChannelUpdateSerializer
        return serializers.ChannelCreateSerializer

    def perform_create(self, serializer):
        return serializer.save(attributed_to=self.request.user.actor)

    def list(self, request, *args, **kwargs):
        if self.request.GET.get("output") == "opml":
            queryset = self.filter_queryset(self.get_queryset())[:500]
            opml = serializers.get_opml(
                channels=queryset,
                date=timezone.now(),
                title="Funkwhale channels OPML export",
            )
            xml_body = renderers.render_xml(
                renderers.dict_to_xml_tree("opml", opml))
            return http.HttpResponse(xml_body, content_type="application/xml")

        else:
            return super().list(request, *args, **kwargs)

    def get_object(self):
        obj = super().get_object()
        if (self.action == "retrieve"
                and self.request.GET.get("refresh", "").lower() == "true"):
            obj = music_views.refetch_obj(obj, self.get_queryset())
        return obj

    @decorators.action(
        detail=True,
        methods=["post"],
        permission_classes=[rest_permissions.IsAuthenticated],
    )
    def subscribe(self, request, *args, **kwargs):
        object = self.get_object()
        subscription = federation_models.Follow(actor=request.user.actor)
        subscription.fid = subscription.get_federation_id()
        subscription, created = SubscriptionsViewSet.queryset.get_or_create(
            target=object.actor,
            actor=request.user.actor,
            defaults={
                "approved": True,
                "fid": subscription.fid,
                "uuid": subscription.uuid,
            },
        )
        # prefetch stuff
        subscription = SubscriptionsViewSet.queryset.get(pk=subscription.pk)
        if not object.actor.is_local:
            routes.outbox.dispatch({"type": "Follow"},
                                   context={"follow": subscription})

        data = serializers.SubscriptionSerializer(subscription).data
        return response.Response(data, status=201)

    @decorators.action(
        detail=True,
        methods=["post", "delete"],
        permission_classes=[rest_permissions.IsAuthenticated],
    )
    def unsubscribe(self, request, *args, **kwargs):
        object = self.get_object()
        follow_qs = request.user.actor.emitted_follows.filter(
            target=object.actor)
        follow = follow_qs.first()
        if follow:
            if not object.actor.is_local:
                routes.outbox.dispatch(
                    {"type": "Undo", "object": {"type": "Follow"}},
                    context={"follow": follow},
                )
            follow_qs.delete()
        return response.Response(status=204)

    @decorators.action(
        detail=True,
        methods=["get"],
        content_negotiation_class=renderers.PodcastRSSContentNegociation,
    )
    def rss(self, request, *args, **kwargs):
        object = self.get_object()
        if not object.attributed_to.is_local:
            return response.Response({"detail": "Not found"}, status=404)

        if object.attributed_to == actors.get_service_actor():
            # external feed; redirect to the canonical one
            return http.HttpResponseRedirect(object.rss_url)

        uploads = (
            object.library.uploads.playable_by(None)
            .prefetch_related(
                Prefetch(
                    "track",
                    queryset=music_models.Track.objects.select_related(
                        "attachment_cover", "description"
                    ).prefetch_related(music_views.TAG_PREFETCH),
                ),
            )
            .select_related("track__attachment_cover", "track__description")
            .order_by("-creation_date")
        )[:50]
        data = serializers.rss_serialize_channel_full(channel=object,
                                                      uploads=uploads)
        return response.Response(data, status=200)

    @decorators.action(
        methods=["get"],
        detail=False,
        url_path="metadata-choices",
        url_name="metadata_choices",
        permission_classes=[],
    )
    def metadata_choices(self, request, *args, **kwargs):
        data = {
            "language": [
                {"value": code, "label": name}
                for code, name in locales.ISO_639_CHOICES
            ],
            "itunes_category": [
                {"value": code, "label": code, "children": children}
                for code, children in categories.ITUNES_CATEGORIES.items()
            ],
        }
        return response.Response(data)

    @decorators.action(
        methods=["post"],
        detail=False,
        url_path="rss-subscribe",
        url_name="rss_subscribe",
    )
    @transaction.atomic
    def rss_subscribe(self, request, *args, **kwargs):
        serializer = serializers.RssSubscribeSerializer(data=request.data)
        if not serializer.is_valid():
            return response.Response(serializer.errors, status=400)
        channel = (
            models.Channel.objects.filter(
                rss_url=serializer.validated_data["url"]
            )
            .order_by("id")
            .first()
        )
        if not channel:
            # try to retrieve the channel via its URL and create it
            try:
                channel, uploads = serializers.get_channel_from_rss_url(
                    serializer.validated_data["url"])
            except serializers.FeedFetchException as e:
                return response.Response(
                    {"detail": str(e)},
                    status=400,
                )

        subscription = federation_models.Follow(actor=request.user.actor)
        subscription.fid = subscription.get_federation_id()
        subscription, created = SubscriptionsViewSet.queryset.get_or_create(
            target=channel.actor,
            actor=request.user.actor,
            defaults={
                "approved": True,
                "fid": subscription.fid,
                "uuid": subscription.uuid,
            },
        )
        # re-fetch through the viewset queryset so related objects come prefetched
        subscription = SubscriptionsViewSet.queryset.get(pk=subscription.pk)

        return response.Response(
            serializers.SubscriptionSerializer(subscription).data, status=201)

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context["subscriptions_count"] = self.action in [
            "retrieve",
            "create",
            "update",
            "partial_update",
        ]
        if self.request.user.is_authenticated:
            context["actor"] = self.request.user.actor
        return context

    @transaction.atomic
    def perform_destroy(self, instance):
        instance.__class__.objects.filter(pk=instance.pk).delete()
        common_utils.on_commit(federation_tasks.remove_actor.delay,
                               actor_id=instance.actor.pk)
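
The viewset above mixes plain string lookups with a Prefetch object that carries a custom queryset (ARTIST_PREFETCH_QS). A minimal sketch of that pattern, assuming a hypothetical Channel model with "library", "attributed_to" and "artist" relations rather than the real Funkwhale schema:

from django.db.models import Prefetch

def channels_for_display(artist_qs):
    # Plain strings prefetch the full related set; the Prefetch object
    # swaps in a narrowed queryset for the "artist" relation only.
    # Channel here is an assumed stand-in model, not the real one.
    return Channel.objects.all().prefetch_related(
        "library",
        "attributed_to",
        Prefetch("artist", queryset=artist_qs),
    ).order_by("-creation_date")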
Exemplo n.º 15
0
def get_grouped_items(event, subevent=None, voucher=None, channel='web', require_seat=0):
    items = event.items.using(settings.DATABASE_REPLICA).filter_available(channel=channel, voucher=voucher).select_related(
        'category', 'tax_rule',  # for re-grouping
        'hidden_if_available',
    ).prefetch_related(
        Prefetch('quotas',
                 to_attr='_subevent_quotas',
                 queryset=event.quotas.using(settings.DATABASE_REPLICA).filter(subevent=subevent)),
        Prefetch('bundles',
                 queryset=ItemBundle.objects.using(settings.DATABASE_REPLICA).prefetch_related(
                     Prefetch('bundled_item',
                              queryset=event.items.using(settings.DATABASE_REPLICA).select_related('tax_rule').prefetch_related(
                                  Prefetch('quotas',
                                           to_attr='_subevent_quotas',
                                           queryset=event.quotas.using(settings.DATABASE_REPLICA).filter(subevent=subevent)),
                              )),
                     Prefetch('bundled_variation',
                              queryset=ItemVariation.objects.using(
                                  settings.DATABASE_REPLICA
                              ).select_related('item', 'item__tax_rule').filter(item__event=event).prefetch_related(
                                  Prefetch('quotas',
                                           to_attr='_subevent_quotas',
                                           queryset=event.quotas.using(settings.DATABASE_REPLICA).filter(subevent=subevent)),
                              )),
                 )),
        Prefetch('variations', to_attr='available_variations',
                 queryset=ItemVariation.objects.using(settings.DATABASE_REPLICA).filter(active=True, quotas__isnull=False).prefetch_related(
                     Prefetch('quotas',
                              to_attr='_subevent_quotas',
                              queryset=event.quotas.using(settings.DATABASE_REPLICA).filter(subevent=subevent))
                 ).distinct()),
    ).annotate(
        quotac=Count('quotas'),
        has_variations=Count('variations'),
        requires_seat=Exists(
            SeatCategoryMapping.objects.filter(
                product_id=OuterRef('pk'),
                subevent=subevent
            )
        )
    ).filter(
        quotac__gt=0,
    ).order_by('category__position', 'category_id', 'position', 'name')
    if require_seat:
        items = items.filter(requires_seat__gt=0)
    else:
        items = items.filter(requires_seat=0)
    display_add_to_cart = False
    external_quota_cache = event.cache.get('item_quota_cache')
    quota_cache = external_quota_cache or {}

    if subevent:
        item_price_override = subevent.item_price_overrides
        var_price_override = subevent.var_price_overrides
    else:
        item_price_override = {}
        var_price_override = {}

    restrict_vars = set()
    if voucher and voucher.quota_id:
        # If a voucher is tied to a specific quota, variations must be filtered at that level
        restrict_vars = set(voucher.quota.variations.all())

    for item in items:
        if voucher and voucher.item_id and voucher.variation_id:
            # Restrict variations if the voucher only allows one
            item.available_variations = [v for v in item.available_variations
                                         if v.pk == voucher.variation_id]

        if get_all_sales_channels()[channel].unlimited_items_per_order:
            max_per_order = sys.maxsize
        else:
            max_per_order = item.max_per_order or int(event.settings.max_items_per_order)

        if item.hidden_if_available:
            q = item.hidden_if_available.availability(_cache=quota_cache)
            if q[0] == Quota.AVAILABILITY_OK:
                item._remove = True
                continue

        item.description = str(item.description)
        for recv, resp in item_description.send(sender=event, item=item, variation=None):
            if resp:
                item.description += ("<br/>" if item.description else "") + resp

        if not item.has_variations:
            item._remove = False
            if not bool(item._subevent_quotas):
                item._remove = True
                continue

            if voucher and (voucher.allow_ignore_quota or voucher.block_quota):
                item.cached_availability = (
                    Quota.AVAILABILITY_OK, voucher.max_usages - voucher.redeemed
                )
            else:
                item.cached_availability = list(
                    item.check_quotas(subevent=subevent, _cache=quota_cache, include_bundled=True)
                )

            if event.settings.hide_sold_out and item.cached_availability[0] < Quota.AVAILABILITY_RESERVED:
                item._remove = True
                continue

            item.order_max = min(
                item.cached_availability[1]
                if item.cached_availability[1] is not None else sys.maxsize,
                max_per_order
            )

            original_price = item_price_override.get(item.pk, item.default_price)
            if voucher:
                price = voucher.calculate_price(original_price)
            else:
                price = original_price

            item.display_price = item.tax(price, currency=event.currency, include_bundled=True)

            if price != original_price:
                item.original_price = item.tax(original_price, currency=event.currency, include_bundled=True)
            else:
                item.original_price = (
                    item.tax(item.original_price, currency=event.currency, include_bundled=True,
                             base_price_is='net' if event.settings.display_net_prices else 'gross')  # backwards-compat
                    if item.original_price else None
                )

            display_add_to_cart = display_add_to_cart or item.order_max > 0
        else:
            for var in item.available_variations:
                var.description = str(var.description)
                for recv, resp in item_description.send(sender=event, item=item, variation=var):
                    if resp:
                        var.description += ("<br/>" if var.description else "") + resp

                if voucher and (voucher.allow_ignore_quota or voucher.block_quota):
                    var.cached_availability = (
                        Quota.AVAILABILITY_OK, voucher.max_usages - voucher.redeemed
                    )
                else:
                    var.cached_availability = list(
                        var.check_quotas(subevent=subevent, _cache=quota_cache, include_bundled=True)
                    )

                var.order_max = min(
                    var.cached_availability[1]
                    if var.cached_availability[1] is not None else sys.maxsize,
                    max_per_order
                )

                original_price = var_price_override.get(var.pk, var.price)
                if voucher:
                    price = voucher.calculate_price(original_price)
                else:
                    price = original_price

                var.display_price = var.tax(price, currency=event.currency, include_bundled=True)

                if price != original_price:
                    var.original_price = var.tax(original_price, currency=event.currency, include_bundled=True)
                else:
                    var.original_price = (
                        var.tax(var.original_price or item.original_price, currency=event.currency,
                                include_bundled=True,
                                base_price_is='net' if event.settings.display_net_prices else 'gross')  # backwards-compat
                    ) if var.original_price or item.original_price else None

                display_add_to_cart = display_add_to_cart or var.order_max > 0

            item.original_price = (
                item.tax(item.original_price, currency=event.currency, include_bundled=True,
                         base_price_is='net' if event.settings.display_net_prices else 'gross')  # backwards-compat
                if item.original_price else None
            )

            item.available_variations = [
                v for v in item.available_variations if v._subevent_quotas and (
                    not voucher or not voucher.quota_id or v in restrict_vars
                )
            ]

            if event.settings.hide_sold_out:
                item.available_variations = [v for v in item.available_variations
                                             if v.cached_availability[0] >= Quota.AVAILABILITY_RESERVED]

            if voucher and voucher.variation_id:
                item.available_variations = [v for v in item.available_variations
                                             if v.pk == voucher.variation_id]

            if len(item.available_variations) > 0:
                item.min_price = min([v.display_price.net if event.settings.display_net_prices else
                                      v.display_price.gross for v in item.available_variations])
                item.max_price = max([v.display_price.net if event.settings.display_net_prices else
                                      v.display_price.gross for v in item.available_variations])

            item._remove = not bool(item.available_variations)

    if not external_quota_cache:
        event.cache.set('item_quota_cache', quota_cache, 5)
    items = [item for item in items
             if (len(item.available_variations) > 0 or not item.has_variations) and not item._remove]
    return items, display_add_to_cart
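
get_grouped_items leans on Prefetch(..., to_attr='_subevent_quotas') so that every item, bundled item and variation carries a pre-filtered list of quotas. A minimal sketch of the to_attr pattern in isolation, assuming the event.items/event.quotas relations used in the snippet above:

from django.db.models import Prefetch

def items_with_subevent_quotas(event, subevent):
    # to_attr materialises the filtered queryset as a plain list on each
    # item, so item._subevent_quotas can be read later with no extra queries.
    return event.items.prefetch_related(
        Prefetch(
            'quotas',
            to_attr='_subevent_quotas',
            queryset=event.quotas.filter(subevent=subevent),
        )
    )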
Exemplo n.º 16
0
def with_user_orders(self, user):
    qs = (Order.objects.filter(
        user=user).with_total_value().with_payment().all())
    return self.prefetch_related(
        Prefetch('order_set', queryset=qs, to_attr='user_orders'))
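
The snippet above is a custom queryset method. A hedged sketch of how such a method is typically wired up and called; Customer and Order are illustrative assumptions, and the with_total_value()/with_payment() helpers are omitted:

from django.db import models
from django.db.models import Prefetch

class CustomerQuerySet(models.QuerySet):
    def with_user_orders(self, user):
        # Assumes Order has a ForeignKey to Customer (hence 'order_set').
        qs = Order.objects.filter(user=user)
        return self.prefetch_related(
            Prefetch('order_set', queryset=qs, to_attr='user_orders'))

class Customer(models.Model):
    objects = CustomerQuerySet.as_manager()

# Usage: each customer in the result carries a plain .user_orders list,
# already restricted to the given user:
# customers = Customer.objects.all().with_user_orders(request.user)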
Exemplo n.º 17
0
class XFormViewSet(AnonymousUserPublicFormsMixin,
                   CacheControlMixin,
                   AuthenticateHeaderMixin,
                   ETagsMixin,
                   LabelsMixin,
                   BaseViewset,
                   ModelViewSet):
    """
    Publish XLSForms, List, Retrieve Published Forms.
    """

    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES + [
        renderers.XLSRenderer,
        renderers.XLSXRenderer,
        renderers.CSVRenderer,
        renderers.CSVZIPRenderer,
        renderers.SAVZIPRenderer,
        renderers.SurveyRenderer,
        renderers.OSMExportRenderer,
        renderers.ZipRenderer,
        renderers.GoogleSheetsRenderer
    ]
    queryset = XForm.objects.select_related('user', 'created_by')\
        .prefetch_related(
            Prefetch(
                'xformuserobjectpermission_set',
                queryset=XFormUserObjectPermission.objects.select_related(
                    'user__profile__organizationprofile',
                    'permission'
                )
            ),
            Prefetch('metadata_set'),
            Prefetch('tags'),
            Prefetch('dataview_set')
        ).only(
            'id', 'id_string', 'title', 'shared', 'shared_data',
            'require_auth', 'created_by', 'num_of_submissions',
            'downloadable', 'encrypted', 'sms_id_string',
            'date_created', 'date_modified', 'last_submission_time',
            'uuid', 'bamboo_dataset', 'instances_with_osm',
            'instances_with_geopoints', 'version', 'has_hxl_support',
            'project', 'last_updated_at', 'user', 'allows_sms', 'description',
            'is_merged_dataset'
        )
    serializer_class = XFormSerializer
    lookup_field = 'pk'
    extra_lookup_fields = None
    permission_classes = [XFormPermissions, ]
    updatable_fields = set(('description', 'downloadable', 'require_auth',
                            'shared', 'shared_data', 'title'))
    filter_backends = (filters.EnketoAnonDjangoObjectPermissionFilter,
                       filters.TagFilter,
                       filters.XFormOwnerFilter,
                       DjangoFilterBackend)
    filter_fields = ('instances_with_osm',)

    public_forms_endpoint = 'public'

    def get_serializer_class(self):
        if self.action == 'list':
            return XFormBaseSerializer

        return super(XFormViewSet, self).get_serializer_class()

    def create(self, request, *args, **kwargs):
        try:
            owner = _get_owner(request)
        except ValidationError as e:
            return Response({'message': e.messages[0]},
                            status=status.HTTP_400_BAD_REQUEST)

        survey = utils.publish_xlsform(request, owner)
        if isinstance(survey, XForm):
            # survey is a DataDictionary; we need an XForm to return the
            # correct role for the user after form publishing.
            serializer = XFormCreateSerializer(
                survey, context={'request': request})
            headers = self.get_success_headers(serializer.data)

            return Response(serializer.data, status=status.HTTP_201_CREATED,
                            headers=headers)

        return Response(survey, status=status.HTTP_400_BAD_REQUEST)

    @action(methods=['POST', 'GET'], detail=False)
    def create_async(self, request, *args, **kwargs):
        """ Temporary Endpoint for Async form creation """
        resp = headers = {}
        resp_code = status.HTTP_400_BAD_REQUEST

        if request.method == 'GET':
            self.etag_data = '{}'.format(timezone.now())
            survey = tasks.get_async_status(
                request.query_params.get('job_uuid'))

            if 'pk' in survey:
                xform = XForm.objects.get(pk=survey.get('pk'))
                serializer = XFormSerializer(
                    xform, context={'request': request})
                headers = self.get_success_headers(serializer.data)
                resp = serializer.data
                resp_code = status.HTTP_201_CREATED
            else:
                resp_code = status.HTTP_202_ACCEPTED
                resp.update(survey)
        else:
            try:
                owner = _get_owner(request)
            except ValidationError as e:
                return Response({'message': e.messages[0]},
                                status=status.HTTP_400_BAD_REQUEST)

            fname = request.FILES.get('xls_file').name
            if isinstance(request.FILES.get('xls_file'), InMemoryUploadedFile):
                xls_file_path = default_storage.save(
                        f'tmp/async-upload-{owner.username}-{fname}',
                        ContentFile(request.FILES.get('xls_file').read()))
            else:
                xls_file_path = request.FILES.get(
                    'xls_file').temporary_file_path()

            resp.update(
                {u'job_uuid':
                 tasks.publish_xlsform_async.delay(
                     request.user.id,
                     request.POST,
                     owner.id,
                     {'name': fname, 'path': xls_file_path}).task_id})
            resp_code = status.HTTP_202_ACCEPTED

        return Response(data=resp, status=resp_code, headers=headers)

    @action(methods=['GET', 'HEAD'], detail=True)
    @never_cache
    def form(self, request, format='json', **kwargs):
        form = self.get_object()
        if format not in ['json', 'xml', 'xls', 'csv']:
            return HttpResponseBadRequest('400 BAD REQUEST',
                                          content_type='application/json',
                                          status=400)
        self.etag_data = '{}'.format(form.date_modified)
        filename = form.id_string + "." + format
        response = response_for_format(form, format=format)
        response['Content-Disposition'] = 'attachment; filename=' + filename

        return response

    @action(methods=['GET'], detail=False)
    def login(self, request, **kwargs):
        return_url = request.query_params.get('return')

        if return_url:
            redirect = parse_webform_return_url(return_url, request)

            if redirect:
                return redirect

            login_vars = {"login_url": settings.ENKETO_CLIENT_LOGIN_URL,
                          "return_url": urlencode({'return_url': return_url})}
            client_login = '******'.format(**login_vars)

            return HttpResponseRedirect(client_login)

        return HttpResponseForbidden(
            "Authentication failure, cannot redirect")

    @action(methods=['GET'], detail=True)
    def enketo(self, request, **kwargs):
        """Expose enketo urls."""
        survey_type = self.kwargs.get('survey_type') or \
            request.GET.get('survey_type')
        self.object = self.get_object()
        form_url = get_form_url(
            request, self.object.user.username,
            protocol=settings.ENKETO_PROTOCOL,
            xform_pk=self.object.pk, generate_consistent_urls=True)

        data = {'message': _(u"Enketo not properly configured.")}
        http_status = status.HTTP_400_BAD_REQUEST

        try:
            # pass default arguments to enketo_url to prepopulate form fields
            request_vars = request.GET
            defaults = generate_enketo_form_defaults(
                self.object, **request_vars)
            url = enketo_url(
                form_url, self.object.id_string, **defaults)
            preview_url = get_enketo_preview_url(request,
                                                 self.object.user.username,
                                                 self.object.id_string,
                                                 xform_pk=self.object.pk)
        except EnketoError as e:
            data = {'message': _(u"Enketo error: %s" % e)}
        else:
            if survey_type == 'single':
                single_submit_url = get_enketo_single_submit_url(
                    request, self.object.user.username, self.object.id_string,
                    xform_pk=self.object.pk)
                data = {"single_submit_url": single_submit_url}
            elif url and preview_url:
                http_status = status.HTTP_200_OK
                data = {"enketo_url": url,
                        "enketo_preview_url": preview_url}

        return Response(data, http_status)

    @action(methods=['POST', 'GET'], detail=False)
    def survey_preview(self, request, **kwargs):
        username = request.user.username
        if request.method.upper() == 'POST':
            if not username:
                raise ParseError("User has to be authenticated")

            csv_data = request.data.get('body')
            if csv_data:
                rand_name = "survey_draft_%s.csv" % ''.join(
                    random.sample("abcdefghijklmnopqrstuvwxyz0123456789", 6))
                csv_file = ContentFile(csv_data)
                csv_name = default_storage.save(
                    upload_to_survey_draft(rand_name, username),
                    csv_file)

                result = publish_form(lambda: get_survey_xml(csv_name))

                if result_has_error(result):
                    raise ParseError(result.get('text'))

                return Response(
                    {'unique_string': rand_name, 'username': username},
                    status=200)
            else:
                raise ParseError('Missing body')

        if request.method.upper() == 'GET':
            filename = request.query_params.get('filename')
            username = request.query_params.get('username')

            if not username:
                raise ParseError('Username not provided')
            if not filename:
                raise ParseError("Filename MUST be provided")

            csv_name = upload_to_survey_draft(filename, username)

            result = publish_form(lambda: get_survey_xml(csv_name))

            if result_has_error(result):
                raise ParseError(result.get('text'))

            self.etag_data = result

            return Response(result, status=200)

    def retrieve(self, request, *args, **kwargs):
        lookup_field = self.lookup_field
        lookup = self.kwargs.get(lookup_field)

        if lookup == self.public_forms_endpoint:
            self.object_list = self._get_public_forms_queryset()

            page = self.paginate_queryset(self.object_list)
            if page is not None:
                serializer = self.get_pagination_serializer(page)
            else:
                serializer = self.get_serializer(self.object_list, many=True)

            return Response(serializer.data)

        xform = self.get_object()
        export_type = kwargs.get('format') or \
            request.query_params.get('format')
        query = request.query_params.get("query")
        token = request.GET.get('token')
        meta = request.GET.get('meta')

        if export_type is None or export_type in ['json', 'debug']:
            # perform default viewset retrieve, no data export
            return super(XFormViewSet, self).retrieve(request, *args, **kwargs)

        return custom_response_handler(request,
                                       xform,
                                       query,
                                       export_type,
                                       token,
                                       meta)

    @action(methods=['POST'], detail=True)
    def share(self, request, *args, **kwargs):
        self.object = self.get_object()

        usernames_str = request.data.get("usernames",
                                         request.data.get("username"))

        if not usernames_str:
            return Response(status=status.HTTP_400_BAD_REQUEST)

        role = request.data.get("role")  # the serializer validates the role
        xform_id = self.object.pk
        data_list = [{"xform":    xform_id,
                      "username": username,
                      "role":     role}
                     for username in usernames_str.split(",")]

        serializer = ShareXFormSerializer(data=data_list, many=True)

        if serializer.is_valid():
            serializer.save()
        else:
            return Response(data=serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

        return Response(status=status.HTTP_204_NO_CONTENT)

    @action(methods=['POST'], detail=True)
    def clone(self, request, *args, **kwargs):
        self.object = self.get_object()
        data = {'xform': self.object.pk,
                'username': request.data.get('username')}
        project = request.data.get('project_id')
        if project:
            data['project'] = project
        serializer = CloneXFormSerializer(data=data)
        if serializer.is_valid():
            clone_to_user = User.objects.get(username=data['username'])
            if not request.user.has_perm('can_add_xform',
                                         clone_to_user.profile):
                raise exceptions.PermissionDenied(
                    detail=_(u"User %(user)s has no permission to add "
                             "xforms to account %(account)s" %
                             {'user': request.user.username,
                              'account': data['username']}))
            try:
                xform = serializer.save()
            except IntegrityError:
                raise ParseError(
                    'A clone with the same id_string has already been created')
            serializer = XFormSerializer(
                xform.cloned_form, context={'request': request})

            return Response(data=serializer.data,
                            status=status.HTTP_201_CREATED)

        return Response(data=serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)

    @action(
        methods=['POST', 'GET'], detail=True,
        url_name='import', url_path='import')
    def data_import(self, request, *args, **kwargs):
        """ Endpoint for CSV and XLS data imports
        Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
        requests passing the `request.FILES.get('csv_file')` upload
        for import and
        :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
        for GET requests passing `job_uuid` query param for job progress
        polling and
        :py:func:`onadata.libs.utils.csv_import.submission_xls_to_csv`
        for POST request passing the `request.FILES.get('xls_file')` upload for
        import if xls_file is provided instead of csv_file
        """
        self.object = self.get_object()
        resp = {}
        if request.method == 'GET':
            try:
                resp.update(get_async_csv_submission_status(
                    request.query_params.get('job_uuid')))
                self.last_modified_date = timezone.now()
            except ValueError:
                raise ParseError(('The instance of the result is not a '
                                  'basestring; the job_uuid variable might '
                                  'be incorrect'))
        else:
            csv_file = request.FILES.get('csv_file', None)
            xls_file = request.FILES.get('xls_file', None)

            if csv_file is None and xls_file is None:
                resp.update({u'error': u'csv_file and xls_file field empty'})

            elif xls_file and \
                    xls_file.name.split('.')[-1] not in XLS_EXTENSIONS:
                resp.update({u'error': u'xls_file not an excel file'})

            elif csv_file and csv_file.name.split('.')[-1] != CSV_EXTENSION:
                resp.update({u'error': u'csv_file not a csv file'})

            else:
                if xls_file and xls_file.name.split('.')[-1] in XLS_EXTENSIONS:
                    csv_file = submission_xls_to_csv(xls_file)
                overwrite = request.query_params.get('overwrite')
                overwrite = True \
                    if overwrite and overwrite.lower() == 'true' else False
                size_threshold = settings.CSV_FILESIZE_IMPORT_ASYNC_THRESHOLD
                try:
                    csv_size = csv_file.size
                except AttributeError:
                    csv_size = csv_file.__sizeof__()
                if csv_size < size_threshold:
                    resp.update(submit_csv(request.user.username,
                                           self.object, csv_file, overwrite))
                else:
                    csv_file.seek(0)
                    upload_to = os.path.join(request.user.username,
                                             'csv_imports', csv_file.name)
                    file_name = default_storage.save(upload_to, csv_file)
                    task = submit_csv_async.delay(request.user.username,
                                                  self.object.pk, file_name,
                                                  overwrite)
                    if task is None:
                        raise ParseError('Task not found')
                    else:
                        resp.update({u'task_id': task.task_id})

        return Response(
            data=resp,
            status=status.HTTP_200_OK if resp.get('error') is None else
            status.HTTP_400_BAD_REQUEST)

    @action(methods=['POST', 'GET'], detail=True)
    def csv_import(self, request, *args, **kwargs):
        """ Endpoint for CSV data imports
        Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
        requests passing the `request.FILES.get('csv_file')` upload
        for import and
        :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
        for GET requests passing `job_uuid` query param for job progress
        polling
        """
        self.object = self.get_object()
        resp = {}
        if request.method == 'GET':
            try:
                resp.update(get_async_csv_submission_status(
                    request.query_params.get('job_uuid')))
                self.last_modified_date = timezone.now()
            except ValueError:
                raise ParseError(('The instance of the result is not a '
                                  'basestring; the job_uuid variable might '
                                  'be incorrect'))
        else:
            csv_file = request.FILES.get('csv_file', None)
            if csv_file is None:
                resp.update({u'error': u'csv_file field empty'})
            elif csv_file.name.split('.')[-1] != CSV_EXTENSION:
                resp.update({u'error': u'csv_file not a csv file'})
            else:
                overwrite = request.query_params.get('overwrite')
                overwrite = True \
                    if overwrite and overwrite.lower() == 'true' else False
                size_threshold = settings.CSV_FILESIZE_IMPORT_ASYNC_THRESHOLD
                if csv_file.size < size_threshold:
                    resp.update(submit_csv(request.user.username,
                                           self.object, csv_file, overwrite))
                else:
                    csv_file.seek(0)
                    upload_to = os.path.join(request.user.username,
                                             'csv_imports', csv_file.name)
                    file_name = default_storage.save(upload_to, csv_file)
                    task = submit_csv_async.delay(request.user.username,
                                                  self.object.pk, file_name,
                                                  overwrite)
                    if task is None:
                        raise ParseError('Task not found')
                    else:
                        resp.update({u'task_id': task.task_id})

        return Response(
            data=resp,
            status=status.HTTP_200_OK if resp.get('error') is None else
            status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, *args, **kwargs):
        self.object = self.get_object()
        owner = self.object.user

        # updating the file
        if request.FILES or set(['xls_url',
                                 'dropbox_xls_url',
                                 'text_xls_form']) & set(request.data):
            return _try_update_xlsform(request, self.object, owner)

        try:
            return super(XFormViewSet, self).partial_update(request, *args,
                                                            **kwargs)
        except XLSFormError as e:
            raise ParseError(str(e))

    @action(methods=['DELETE', 'GET'], detail=True)
    def delete_async(self, request, *args, **kwargs):
        if request.method == 'DELETE':
            xform = self.get_object()
            resp = {
                u'job_uuid': tasks.delete_xform_async.delay(
                    xform.pk,
                    request.user.id).task_id,
                u'time_async_triggered': datetime.now()}

            # clear project from cache
            safe_delete(f'{PROJ_OWNER_CACHE}{xform.project.pk}')
            resp_code = status.HTTP_202_ACCEPTED

        elif request.method == 'GET':
            job_uuid = request.query_params.get('job_uuid')
            resp = tasks.get_async_status(job_uuid)
            resp_code = status.HTTP_202_ACCEPTED
            self.etag_data = '{}'.format(timezone.now())

        return Response(data=resp, status=resp_code)

    def destroy(self, request, *args, **kwargs):
        xform = self.get_object()
        user = request.user
        xform.soft_delete(user=user)

        return Response(status=status.HTTP_204_NO_CONTENT)

    @action(methods=['GET'], detail=True)
    def export_async(self, request, *args, **kwargs):
        job_uuid = request.query_params.get('job_uuid')
        export_type = request.query_params.get('format')
        query = request.query_params.get("query")
        xform = self.get_object()

        token = request.query_params.get('token')
        meta = request.query_params.get('meta')
        data_id = request.query_params.get('data_id')
        options = parse_request_export_options(request.query_params)

        options.update({
            'meta': meta,
            'token': token,
            'data_id': data_id,
        })
        if query:
            options.update({'query': query})

        if job_uuid:
            try:
                resp = get_async_response(job_uuid, request, xform)
            except Export.DoesNotExist:
                # if this does not exist retry it against the primary
                try:
                    with use_master:
                        resp = get_async_response(job_uuid, request, xform)
                except NameError:
                    resp = get_async_response(job_uuid, request, xform)
        else:
            resp = process_async_export(request, xform, export_type, options)

            if isinstance(resp, HttpResponseRedirect):
                payload = {
                    "details": _("Google authorization needed"),
                    "url": resp.url
                }
                return Response(data=payload,
                                status=status.HTTP_403_FORBIDDEN,
                                content_type="application/json")

        self.etag_data = '{}'.format(timezone.now())

        return Response(data=resp,
                        status=status.HTTP_202_ACCEPTED,
                        content_type="application/json")

    def _get_streaming_response(self):
        """
        Get a StreamingHttpResponse response object
        """
        # use queryset_iterator; switch to Django's native .iterator() once
        # we upgrade to Django 2, where .iterator() supports a chunk size
        queryset = queryset_iterator(self.object_list, chunksize=2000)

        def get_json_string(item):
            return json.dumps(XFormBaseSerializer(
                instance=item,
                context={'request': self.request}
                ).data)

        response = StreamingHttpResponse(
            json_stream(queryset, get_json_string),
            content_type="application/json"
        )

        # calculate etag value and add it to response headers
        if hasattr(self, 'etag_data'):
            self.set_etag_header(None, self.etag_data)

        self.set_cache_control(response)

        # set headers on streaming response
        for k, v in self.headers.items():
            response[k] = v

        return response

    def list(self, request, *args, **kwargs):
        STREAM_DATA = getattr(settings, 'STREAM_DATA', False)
        try:
            queryset = self.filter_queryset(self.get_queryset())
            last_modified = queryset.values_list('date_modified', flat=True)\
                .order_by('-date_modified')
            if last_modified:
                self.etag_data = last_modified[0].isoformat()
            if STREAM_DATA:
                self.object_list = queryset
                resp = self._get_streaming_response()
            else:
                resp = super(XFormViewSet, self).list(request, *args, **kwargs)
        except XLSFormError as e:
            resp = HttpResponseBadRequest(e)

        return resp
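
The list()/_get_streaming_response pair above serializes one form at a time and streams the result instead of building the whole payload in memory. A minimal standalone sketch of that idea; json_stream is written out here as a hypothetical helper, and .iterator(chunk_size=...) stands in for queryset_iterator on Django 2+:

from django.http import StreamingHttpResponse

def json_stream(items, to_json):
    # Emit a JSON array one element at a time.
    yield '['
    first = True
    for item in items:
        if not first:
            yield ','
        first = False
        yield to_json(item)
    yield ']'

def stream_queryset(queryset, serialize):
    # serialize would typically be something like
    # lambda obj: json.dumps(SomeSerializer(obj).data)
    return StreamingHttpResponse(
        json_stream(queryset.iterator(chunk_size=2000), serialize),
        content_type='application/json',
    )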
Exemplo n.º 18
0
def get_teams(self, obj):
    teams = Team.objects.filter(challenge=obj).prefetch_related('competitors') \
        .prefetch_related(Prefetch('competitors__stats',
                                   queryset=Stats.objects.filter(challenge=obj)))
    return TeamSerializer(teams, many=True).data
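
Note the two-level lookup above: 'competitors' is prefetched in full, while the Prefetch through 'competitors__stats' narrows only the second hop. The same shape, condensed into a sketch using the models from the snippet:

from django.db.models import Prefetch

def teams_for(challenge):
    return Team.objects.filter(challenge=challenge).prefetch_related(
        'competitors',
        Prefetch('competitors__stats',
                 queryset=Stats.objects.filter(challenge=challenge)),
    )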
Exemplo n.º 19
0
class XFormViewSet(AnonymousUserPublicFormsMixin,
                   CacheControlMixin,
                   AuthenticateHeaderMixin,
                   ETagsMixin,
                   LabelsMixin,
                   BaseViewset,
                   ModelViewSet):
    """
    Publish XLSForms, List, Retrieve Published Forms.
    """

    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES + [
        renderers.XLSRenderer,
        renderers.XLSXRenderer,
        renderers.CSVRenderer,
        renderers.CSVZIPRenderer,
        renderers.SAVZIPRenderer,
        renderers.SurveyRenderer,
        renderers.OSMExportRenderer,
        renderers.ZipRenderer,
        renderers.GoogleSheetsRenderer
    ]
    queryset = XForm.objects.select_related('user', 'created_by')\
        .prefetch_related(
            Prefetch(
                'xformuserobjectpermission_set',
                queryset=XFormUserObjectPermission.objects.select_related(
                    'user__profile__organizationprofile',
                    'permission'
                )
            ),
            Prefetch('metadata_set'),
            Prefetch('tags'),
            Prefetch('dataview_set')
        )
    serializer_class = XFormSerializer
    lookup_field = 'pk'
    extra_lookup_fields = None
    permission_classes = [XFormPermissions, ]
    updatable_fields = set(('description', 'downloadable', 'require_auth',
                            'shared', 'shared_data', 'title'))
    filter_backends = (filters.AnonDjangoObjectPermissionFilter,
                       filters.TagFilter,
                       filters.XFormOwnerFilter,
                       DjangoFilterBackend)
    filter_fields = ('instances_with_osm',)

    public_forms_endpoint = 'public'

    def get_serializer_class(self):
        if self.action == 'list':
            return XFormBaseSerializer

        return super(XFormViewSet, self).get_serializer_class()

    def create(self, request, *args, **kwargs):
        try:
            owner = _get_owner(request)
        except ValidationError as e:
            return Response({'message': e.messages[0]},
                            status=status.HTTP_400_BAD_REQUEST)

        survey = utils.publish_xlsform(request, owner)
        if isinstance(survey, XForm):
            serializer = XFormCreateSerializer(
                survey, context={'request': request})
            headers = self.get_success_headers(serializer.data)

            return Response(serializer.data, status=status.HTTP_201_CREATED,
                            headers=headers)

        return Response(survey, status=status.HTTP_400_BAD_REQUEST)

    @list_route(methods=['POST', 'GET'])
    def create_async(self, request, *args, **kwargs):
        """ Temporary Endpoint for Async form creation """
        resp = headers = {}
        resp_code = status.HTTP_400_BAD_REQUEST

        if request.method == 'GET':
            self.etag_data = '{}'.format(timezone.now())
            survey = tasks.get_async_status(
                request.query_params.get('job_uuid'))

            if 'pk' in survey:
                xform = XForm.objects.get(pk=survey.get('pk'))
                serializer = XFormSerializer(
                    xform, context={'request': request})
                headers = self.get_success_headers(serializer.data)
                resp = serializer.data
                resp_code = status.HTTP_201_CREATED
            else:
                resp_code = status.HTTP_202_ACCEPTED
                resp.update(survey)
        else:
            try:
                owner = _get_owner(request)
            except ValidationError as e:
                return Response({'message': e.messages[0]},
                                status=status.HTTP_400_BAD_REQUEST)

            fname = request.FILES.get('xls_file').name
            resp.update(
                {u'job_uuid':
                 tasks.publish_xlsform_async.delay(
                     request.user, request.POST, owner,
                     ({'name': fname,
                       'data': request.FILES.get('xls_file').read()}
                      if isinstance(request.FILES.get('xls_file'),
                                    InMemoryUploadedFile) else
                      {'name': fname,
                       'path': request.FILES.get(
                           'xls_file').temporary_file_path()})).task_id})
            resp_code = status.HTTP_202_ACCEPTED

        return Response(data=resp, status=resp_code, headers=headers)

    @detail_route()
    @never_cache
    def form(self, request, format='json', **kwargs):
        form = self.get_object()
        if format not in ['json', 'xml', 'xls']:
            return HttpResponseBadRequest('400 BAD REQUEST',
                                          content_type='application/json',
                                          status=400)
        self.etag_data = '{}'.format(form.date_modified)
        filename = form.id_string + "." + format
        response = response_for_format(form, format=format)
        response['Content-Disposition'] = 'attachment; filename=' + filename

        return response

    @list_route(methods=['GET'])
    def login(self, request, **kwargs):
        return_url = request.query_params.get('return')

        if return_url:
            redirect = parse_webform_return_url(return_url, request)

            if redirect:
                return redirect

            login_vars = {"login_url": settings.ENKETO_CLIENT_LOGIN_URL,
                          "return_url": urlencode({'return_url': return_url})}
            client_login = '******'.format(**login_vars)

            return HttpResponseRedirect(client_login)

        return HttpResponseForbidden(
            "Authentication failure, cannot redirect")

    @detail_route()
    def enketo(self, request, **kwargs):
        self.object = self.get_object()
        form_url = get_form_url(
            request, self.object.user.username, settings.ENKETO_PROTOCOL)

        data = {'message': _(u"Enketo not properly configured.")}
        http_status = status.HTTP_400_BAD_REQUEST

        try:
            # pass default arguments to enketo_url to prepopulate form fields
            request_vars = request.GET
            defaults = generate_enketo_form_defaults(
                self.object, **request_vars)
            url = enketo_url(form_url, self.object.id_string, **defaults)
            preview_url = get_enketo_preview_url(request,
                                                 self.object.user.username,
                                                 self.object.id_string)
        except EnketoError as e:
            data = {'message': _(u"Enketo error: %s" % e)}
        else:
            if url and preview_url:
                http_status = status.HTTP_200_OK
                data = {"enketo_url": url, "enketo_preview_url": preview_url}

        return Response(data, http_status)

    @list_route(methods=['POST', 'GET'])
    def survey_preview(self, request, **kwargs):
        username = request.user.username
        if request.method.upper() == 'POST':
            if not username:
                raise ParseError("User has to be authenticated")

            csv_data = request.data.get('body')
            if csv_data:
                rand_name = "survey_draft_%s.csv" % ''.join(
                    random.sample("abcdefghijklmnopqrstuvwxyz0123456789", 6))
                csv_file = ContentFile(csv_data)
                csv_name = default_storage.save(
                    upload_to_survey_draft(rand_name, username),
                    csv_file)

                result = publish_form(lambda: get_survey_xml(csv_name))

                if result_has_error(result):
                    raise ParseError(result.get('text'))

                return Response(
                    {'unique_string': rand_name, 'username': username},
                    status=200)
            else:
                raise ParseError('Missing body')

        if request.method.upper() == 'GET':
            filename = request.query_params.get('filename')
            username = request.query_params.get('username')

            if not username:
                raise ParseError('Username not provided')
            if not filename:
                raise ParseError("Filename MUST be provided")

            csv_name = upload_to_survey_draft(filename, username)

            result = publish_form(lambda: get_survey_xml(csv_name))

            if result_has_error(result):
                raise ParseError(result.get('text'))

            self.etag_data = result

            return Response(result, status=200)

    def retrieve(self, request, *args, **kwargs):
        lookup_field = self.lookup_field
        lookup = self.kwargs.get(lookup_field)

        if lookup == self.public_forms_endpoint:
            self.object_list = self._get_public_forms_queryset()

            page = self.paginate_queryset(self.object_list)
            if page is not None:
                serializer = self.get_pagination_serializer(page)
            else:
                serializer = self.get_serializer(self.object_list, many=True)

            return Response(serializer.data)

        xform = self.get_object()
        export_type = kwargs.get('format') or \
            request.query_params.get('format')
        query = request.query_params.get("query", {})
        token = request.GET.get('token')
        meta = request.GET.get('meta')

        if export_type is None or export_type in ['json', 'debug']:
            # perform default viewset retrieve, no data export
            return super(XFormViewSet, self).retrieve(request, *args, **kwargs)

        return custom_response_handler(request,
                                       xform,
                                       query,
                                       export_type,
                                       token,
                                       meta)

    @detail_route(methods=['POST'])
    def share(self, request, *args, **kwargs):
        self.object = self.get_object()

        usernames_str = request.data.get("usernames",
                                         request.data.get("username"))

        if not usernames_str:
            return Response(status=status.HTTP_400_BAD_REQUEST)

        role = request.data.get("role")  # the serializer validates the role
        xform_id = self.object.pk
        data_list = [{"xform":    xform_id,
                      "username": username,
                      "role":     role}
                     for username in usernames_str.split(",")]

        serializer = ShareXFormSerializer(data=data_list, many=True)

        if serializer.is_valid():
            serializer.save()
        else:
            return Response(data=serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

        return Response(status=status.HTTP_204_NO_CONTENT)

    @detail_route(methods=['POST'])
    def clone(self, request, *args, **kwargs):
        self.object = self.get_object()
        data = {'xform': self.object.pk,
                'username': request.data.get('username')}
        project = request.data.get('project_id')
        if project:
            data['project'] = project
        serializer = CloneXFormSerializer(data=data)
        if serializer.is_valid():
            clone_to_user = User.objects.get(username=data['username'])
            if not request.user.has_perm('can_add_xform',
                                         clone_to_user.profile):
                raise exceptions.PermissionDenied(
                    detail=_(u"User %(user)s has no permission to add "
                             "xforms to account %(account)s" %
                             {'user': request.user.username,
                              'account': data['username']}))
            try:
                xform = serializer.save()
            except IntegrityError:
                raise ParseError(
                    'A clone with the same id_string has already been created')
            serializer = XFormSerializer(
                xform.cloned_form, context={'request': request})

            return Response(data=serializer.data,
                            status=status.HTTP_201_CREATED)

        return Response(data=serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)

    @detail_route(methods=['POST', 'GET'])
    def csv_import(self, request, *args, **kwargs):
        """ Endpoint for CSV data imports
        Calls :py:func:`onadata.libs.utils.csv_import.submit_csv` for POST
        requests passing the `request.FILES.get('csv_file')` upload
        for import and
        :py:func:onadata.libs.utils.csv_import.get_async_csv_submission_status
        for GET requests passing `job_uuid` query param for job progress
        polling
        """
        self.object = self.get_object()
        resp = {}
        if request.method == 'GET':
            try:
                resp.update(get_async_csv_submission_status(
                    request.query_params.get('job_uuid')))
                self.last_modified_date = timezone.now()
            except ValueError:
                raise ParseError(('The instance of the result is not a '
                                  'basestring; the job_uuid variable might '
                                  'be incorrect'))
        else:
            csv_file = request.FILES.get('csv_file', None)
            if csv_file is None:
                resp.update({u'error': u'csv_file field empty'})
            else:
                num_rows = sum(1 for row in csv_file) - 1
                if num_rows < settings.CSV_ROW_IMPORT_ASYNC_THRESHOLD:
                    resp.update(submit_csv(request.user.username,
                                           self.object, csv_file))
                else:
                    tmp_file_path = utils.generate_tmp_path(csv_file)
                    task = submit_csv_async.delay(request.user.username,
                                                  self.object,
                                                  tmp_file_path)
                    if task is None:
                        raise ParseError('Task not found')
                    else:
                        resp.update({u'task_id': task.task_id})

        return Response(
            data=resp,
            status=status.HTTP_200_OK if resp.get('error') is None else
            status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, *args, **kwargs):
        self.object = self.get_object()
        owner = self.object.user

        # updating the file
        if request.FILES or set(['xls_url',
                                 'dropbox_xls_url',
                                 'text_xls_form']) & set(request.data.keys()):
            return _try_update_xlsform(request, self.object, owner)

        return super(XFormViewSet, self).partial_update(request, *args,
                                                        **kwargs)

    @detail_route(methods=['DELETE', 'GET'])
    def delete_async(self, request, *args, **kwargs):
        if request.method == 'DELETE':
            xform = self.get_object()
            resp = {
                u'job_uuid': tasks.delete_xform_async.delay(xform).task_id,
                u'time_async_triggered': datetime.now()}
            resp_code = status.HTTP_202_ACCEPTED

        elif request.method == 'GET':
            job_uuid = request.query_params.get('job_uuid')
            resp = tasks.get_async_status(job_uuid)
            resp_code = status.HTTP_202_ACCEPTED
            self.etag_data = '{}'.format(timezone.now())

        return Response(data=resp, status=resp_code)

    def destroy(self, request, *args, **kwargs):
        xform = self.get_object()
        xform.soft_delete()

        return Response(status=status.HTTP_204_NO_CONTENT)

    @detail_route(methods=['GET'])
    def export_async(self, request, *args, **kwargs):
        job_uuid = request.query_params.get('job_uuid')
        export_type = request.query_params.get('format')
        query = request.query_params.get("query")
        xform = self.get_object()

        token = request.query_params.get('token')
        meta = request.query_params.get('meta')
        data_id = request.query_params.get('data_id')
        options = parse_request_export_options(request.query_params)

        options.update({
            'meta': meta,
            'token': token,
            'data_id': data_id,
            'query': query,
        })

        if job_uuid:
            try:
                resp = get_async_response(job_uuid, request, xform)
            except Export.DoesNotExist:
                # if this does not exist, retry it against the primary
                with use_master:
                    resp = get_async_response(job_uuid, request, xform)
        else:
            resp = process_async_export(request, xform, export_type, options)

            if isinstance(resp, HttpResponseRedirect):
                payload = {
                    "details": _("Google authorization needed"),
                    "url": resp.url
                }
                return Response(data=payload,
                                status=status.HTTP_403_FORBIDDEN,
                                content_type="application/json")

        self.etag_data = '{}'.format(timezone.now())

        return Response(data=resp,
                        status=status.HTTP_202_ACCEPTED,
                        content_type="application/json")

    def list(self, request, *args, **kwargs):
        try:
            queryset = self.filter_queryset(self.get_queryset())
            last_modified = queryset.values_list('date_modified', flat=True)\
                .order_by('-date_modified')
            if last_modified:
                self.etag_data = last_modified[0]
            resp = super(XFormViewSet, self).list(request, *args, **kwargs)
        except XLSFormError as e:
            resp = HttpResponseBadRequest(str(e))

        return resp
Exemplo n.º 20
0
def run_multiple_use_cases(id="", userid="", data_times=""):
    '''Run multiple test cases.'''
    module_prefetch = Prefetch("moduleInfo",
                               queryset=ModuleInfo.objects.order_by(
                                   "module_order"))  # 这里是把我们需要过滤的东西在这里过滤
    use_prefetch = Prefetch("usecase_set",
                            queryset=Usecase.objects.order_by("case_order"))
    module_data = ProjectInfo.objects.filter(id=id).prefetch_related(
        module_prefetch)  # prefetch_related only applies to one-to-many, many-to-one or many-to-many relations
    use_data = ModuleInfo.objects.prefetch_related(use_prefetch)
    for i in module_data:
        print(i.project_name)
        module_all = i.moduleInfo.all()  # all() works here, but filter() would issue a new query each time
        for sub_data in module_all:
            for use_data_all in use_data:
                if sub_data.module_name == use_data_all.module_name:
                    print(use_data_all.module_name)
                    usecase_data_all = use_data_all.usecase_set.all()
                    for usecase in usecase_data_all:
                        print(usecase.case_name)
                        method = usecase.req  # request method
                        print(method)
                        url1 = usecase.use_case1.host_url
                        url2 = usecase.case_url  # path part of the request URL
                        url = url1 + url2  # full URL for this test case
                        print(url)
                        headers = header(
                            key=eval(usecase.header_key),
                            value=eval(
                                reg(json_keyvalue=usecase.header_value)))
                        if usecase.value_type == "form-data":
                            datas = data(key=eval(usecase.data_key),
                                         alltype=eval(usecase.data_type),
                                         value=eval(usecase.data_value))
                        else:
                            datas = eval(
                                reg(json_keyvalue=usecase.json_keyvalue))
                        if method == "GET":
                            req = req_mode(method=method,
                                           url=url,
                                           params=datas,
                                           headers=headers,
                                           zt=usecase.value_type)
                        else:
                            req = req_mode(method=method,
                                           url=url,
                                           data=datas,
                                           headers=headers,
                                           zt=usecase.value_type)
                        dy_keylist = get_value(req=req,
                                               key=eval(usecase.dy_key))
                        all_extract_stat = read_req(key=eval(
                            usecase.all_extract),
                                                    req_ode=req)
                        if all_extract_stat == []:  # if empty, just skip
                            pass
                        else:
                            for read in range(0,
                                              len(eval(usecase.all_extract))):
                                cache.set(
                                    eval(usecase.all_extract)[read],
                                    all_extract_stat[read], 3600)
                                # result = cache.get(eval(i.all_extract)[0])  # read the cached value back
                        asserts = assertion(key=dy_keylist,
                                            alltype=eval(usecase.dy_type),
                                            value=eval(usecase.dy_value))
                        print(data_times)
                        operation = operation_information.objects.create(
                            information=req,
                            result=asserts,
                            use_case_mode="many",
                            dy_value=dy_keylist,
                            function_time=data_times,
                            user_id=userid,
                            use_id=usecase.id)
                        # operation.save()
                        usecase_data_all.update(state=asserts)
                else:
                    pass
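
As the comments in this example warn, calling filter() on a prefetched manager bypasses the prefetch cache and issues fresh queries. A minimal sketch of the safer to_attr variant, assuming hypothetical ProjectInfo/ModuleInfo models shaped like the ones above:

from django.db.models import Prefetch

projects = ProjectInfo.objects.prefetch_related(
    Prefetch(
        "moduleInfo",
        queryset=ModuleInfo.objects.order_by("module_order"),
        to_attr="ordered_modules",
    )
)
for project in projects:
    # ordered_modules is a plain list, so iterating it can never
    # fall back to the database the way .filter() on a manager does
    for module in project.ordered_modules:
        print(module.module_name)
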
Exemplo n.º 21
0
def waitinglist_widgets(sender, subevent=None, lazy=False, **kwargs):
    widgets = []

    wles = WaitingListEntry.objects.filter(event=sender,
                                           subevent=subevent,
                                           voucher__isnull=True)
    if wles.exists():
        if not lazy:
            quota_cache = {}
            happy = 0
            tuples = wles.values(
                'item', 'variation').order_by().annotate(cnt=Count('id'))

            items = {
                i.pk: i
                for i in sender.items.filter(
                    id__in=[t['item'] for t in tuples]).prefetch_related(
                        Prefetch('quotas',
                                 to_attr='_subevent_quotas',
                                 queryset=sender.quotas.using(
                                     settings.DATABASE_REPLICA).filter(
                                         subevent=subevent)), )
            }
            vars = {
                i.pk: i
                for i in ItemVariation.objects.filter(
                    item__event=sender,
                    id__in=[t['variation'] for t in tuples
                            if t['variation']]).prefetch_related(
                                Prefetch('quotas',
                                         to_attr='_subevent_quotas',
                                         queryset=sender.quotas.using(
                                             settings.DATABASE_REPLICA).filter(
                                                 subevent=subevent)), )
            }

            for wlt in tuples:
                item = items.get(wlt['item'])
                variation = vars.get(wlt['variation'])
                if not item:
                    continue
                quotas = (variation._get_quotas(subevent=subevent) if variation
                          else item._get_quotas(subevent=subevent))
                row = (variation.check_quotas(subevent=subevent,
                                              count_waitinglist=False,
                                              _cache=quota_cache) if variation
                       else item.check_quotas(subevent=subevent,
                                              count_waitinglist=False,
                                              _cache=quota_cache))
                if row[1] is None:
                    happy += 1
                elif row[1] > 0:
                    happy += 1
                    for q in quotas:
                        if q.size is not None:
                            quota_cache[q.pk] = (quota_cache[q.pk][0],
                                                 quota_cache[q.pk][1] - 1)

        widgets.append({
            'content':
            None if lazy else NUM_WIDGET.format(
                num=str(happy),
                text=_('available to give to people on waiting list')),
            'lazy':
            'waitinglist-avail',
            'priority':
            50,
            'url':
            reverse('control:event.orders.waitinglist',
                    kwargs={
                        'event': sender.slug,
                        'organizer': sender.organizer.slug,
                    })
        })
        widgets.append({
            'content':
            None if lazy else NUM_WIDGET.format(
                num=str(wles.count()), text=_('total waiting list length')),
            'lazy':
            'waitinglist-length',
            'display_size':
            'small',
            'priority':
            50,
            'url':
            reverse('control:event.orders.waitinglist',
                    kwargs={
                        'event': sender.slug,
                        'organizer': sender.organizer.slug,
                    })
        })

    return widgets
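
One detail worth isolating from this example: the queryset inside a Prefetch controls which database alias the prefetch query runs on, so the quota lookups can be pinned to a read replica while the outer query stays on the default connection. A sketch under the same assumptions (the sender event, subevent, and DATABASE_REPLICA alias used above):

from django.conf import settings
from django.db.models import Prefetch

items = sender.items.prefetch_related(
    Prefetch(
        'quotas',
        to_attr='_subevent_quotas',
        # only this inner queryset is routed to the replica
        queryset=sender.quotas.using(settings.DATABASE_REPLICA).filter(
            subevent=subevent),
    )
)
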
Exemplo n.º 22
0
    def prefetch_tags_with_posts_count(self):
        return self.prefetch_related(
            Prefetch('tags', queryset=Tag.objects.annotate(Count('posts'))))
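
Annotations applied inside a Prefetch queryset survive on the prefetched objects; with no alias given, Count('posts') lands on each tag as posts__count. A usage sketch, assuming Post/Tag models where Post.tags is a many-to-many with related name 'posts' and Tag has a name field:

from django.db.models import Count, Prefetch

posts = Post.objects.prefetch_related(
    Prefetch('tags', queryset=Tag.objects.annotate(Count('posts'))))
for post in posts:
    for tag in post.tags.all():  # served from the prefetch cache
        print(tag.name, tag.posts__count)  # default alias for Count('posts')
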
Exemplo n.º 23
0
    def get(self, request, slug):
        comment_query = Comment.objects.annotate(count_like=Count("users_like")).select_related("author")
        comments = Prefetch("comments", comment_query)
        book = Book.objects.prefetch_related("authors", comments).get(slug=slug)
        return render(request, "book_detail.html", {"book": book, "rate": 2})
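
A note on consuming this view's prefetch: because the inner queryset already select_related('author') and annotated count_like, iterating the book's comments afterwards costs no extra queries. A sketch of what the template loop relies on:

for comment in book.comments.all():  # hits the prefetch cache, no new query
    # author came via select_related inside the Prefetch queryset,
    # count_like was annotated there as well
    print(comment.author, comment.count_like)
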
Exemplo n.º 24
0
    def generateFullReport(self, pk, base_url):
        self.base_url = base_url

        # Our container for 'Flowable' objects
        elements = []
        toc = TableOfContents()
        toc.levelStyles = [
            PS(fontName='arialuni',
               fontSize=12,
               name='TOCHeading1',
               leftIndent=20,
               firstLineIndent=-20,
               spaceBefore=5,
               leading=10),
            PS(fontName='arialuni',
               fontSize=10,
               name='TOCHeading2',
               leftIndent=40,
               firstLineIndent=-20,
               spaceBefore=3,
               leading=10),
            PS(fontName='arialuni',
               fontSize=9,
               name='TOCHeading3',
               leftIndent=40,
               firstLineIndent=-20,
               spaceBefore=3,
               leading=10),
        ]
        elements.append(Paragraph('Responses Report for Site', self.centered))
        elements.append(PageBreak())
        elements.append(Paragraph('Table of contents', self.centered))
        elements.append(toc)
        elements.append(PageBreak())

        # A large collection of style sheets pre-made for us
        styles = getSampleStyleSheet()
        styles.add(ParagraphStyle(name='centered', alignment=TA_CENTER))
        site = Site.objects.select_related('project').get(pk=pk)
        self.project_name = site.project.name
        self.project_logo = site.project.logo.url

        elements.append(Paragraph(site.name, self.h1))
        elements.append(Paragraph(site.identifier, styles['Normal']))
        if site.address:
            elements.append(Paragraph(site.address, styles['Normal']))
        if site.phone:
            elements.append(Paragraph(site.phone, styles['Normal']))
        if site.region:
            elements.append(Paragraph(site.region.name, styles['Normal']))

        elements.append(PageBreak())
        elements.append(Paragraph('Responses', self.h2))

        forms = FieldSightXF.objects.select_related('xf').filter(
            is_survey=False, is_deleted=False).filter(
                Q(site_id=site.id, from_project=False)
                | Q(project_id=site.project_id)).prefetch_related(
                    Prefetch(
                        'site_form_instances',
                        queryset=FInstance.objects.select_related('instance')),
                    Prefetch(
                        'project_form_instances',
                        queryset=FInstance.objects.select_related(
                            'instance').filter(site_id=site.id))).order_by(
                                '-is_staged', 'is_scheduled')

        if not forms:
            elements.append(
                Paragraph("No Any Responses Yet.", styles['Heading5']))
        #a=FieldSightXF.objects.select_related('xf').filter(site_id=291).prefetch_related(Prefetch('site_form_instances', queryset=FInstance.objects.select_related('instance')))

        styNormal = styleSheet['Normal']
        styBackground = ParagraphStyle('background',
                                       parent=styNormal,
                                       backColor=colors.white)

        for form in forms:
            elements.append(Spacer(0, 10))
            elements.append(Paragraph(form.xf.title, self.h3))
            elements.append(
                Paragraph(form.form_type() + " Form", styles['Heading4']))
            if form.stage:
                if form.stage.stage:
                    elements.append(
                        Paragraph("Stage Id: " + str(form.stage.stage.order),
                                  self.paragraphstyle))
                    elements.append(
                        Paragraph("Sub Stage Id: " + str(form.stage.order),
                                  self.paragraphstyle))
                else:
                    elements.append(
                        Paragraph("Stage Id: " + str(form.stage.order),
                                  self.paragraphstyle))

            json_question = form.xf.json
            form_user_name = form.xf.user.username
            self.media_folder = form_user_name

            #cursor = get_instaces_for_site_individual_form(form.id)

            sub_count = 0

            if not form.from_project and form.site_form_instances.all():
                for instance in form.site_form_instances.all():
                    self.instance_id = instance.instance_id
                    self.append_answers(json_question, instance, sub_count)

            elif form.project_form_instances.all():
                for instance in form.project_form_instances.all():
                    self.instance_id = instance.instance_id
                    self.append_answers(json_question, instance, sub_count)

            else:
                elements.append(
                    Paragraph("No Submisions Yet. ", styles['Heading5']))
                elements.append(Spacer(0, 10))
        self.doc.multiBuild(elements, onLaterPages=self._header_footer)
Exemplo n.º 25
0
    def __init__(self, *args, **kwargs):
        """
        Takes additional keyword arguments:

        :param category: The category to choose from
        :param event: The event this belongs to
        :param subevent: The event the parent cart position belongs to
        :param initial: The current set of add-ons
        :param quota_cache: A shared dictionary for quota caching
        :param item_cache: A shared dictionary for item/category caching
        """
        category = kwargs.pop('category')
        event = kwargs.pop('event')
        subevent = kwargs.pop('subevent')
        current_addons = kwargs.pop('initial')
        quota_cache = kwargs.pop('quota_cache')
        item_cache = kwargs.pop('item_cache')
        self.price_included = kwargs.pop('price_included')

        super().__init__(*args, **kwargs)

        if subevent:
            item_price_override = subevent.item_price_overrides
            var_price_override = subevent.var_price_overrides
        else:
            item_price_override = {}
            var_price_override = {}

        ckey = '{}-{}'.format(subevent.pk if subevent else 0, category.pk)
        if ckey not in item_cache:
            # Get all items to possibly show
            items = category.items.filter(
                Q(active=True)
                & Q(
                    Q(available_from__isnull=True)
                    | Q(available_from__lte=now()))
                & Q(
                    Q(available_until__isnull=True)
                    | Q(available_until__gte=now()))
                & Q(hide_without_voucher=False)
            ).select_related('tax_rule').prefetch_related(
                Prefetch('quotas',
                         to_attr='_subevent_quotas',
                         queryset=event.quotas.filter(subevent=subevent)),
                Prefetch('variations',
                         to_attr='available_variations',
                         queryset=ItemVariation.objects.filter(
                             active=True,
                             quotas__isnull=False).prefetch_related(
                                 Prefetch('quotas',
                                          to_attr='_subevent_quotas',
                                          queryset=event.quotas.filter(
                                              subevent=subevent))).distinct()),
            ).annotate(quotac=Count('quotas'),
                       has_variations=Count('variations')).filter(
                           quotac__gt=0).order_by('category__position',
                                                  'category_id', 'position',
                                                  'name')
            item_cache[ckey] = items
        else:
            items = item_cache[ckey]

        for i in items:
            if i.has_variations:
                choices = [('', _('no selection'), '')]
                for v in i.available_variations:
                    cached_availability = v.check_quotas(subevent=subevent,
                                                         _cache=quota_cache)
                    if v._subevent_quotas:
                        choices.append(
                            (v.pk,
                             self._label(event,
                                         v,
                                         cached_availability,
                                         override_price=var_price_override.get(
                                             v.pk)), v.description))

                field = AddOnVariationField(
                    choices=choices,
                    label=i.name,
                    required=False,
                    widget=AddOnRadioSelect,
                    help_text=rich_text(str(i.description)),
                    initial=current_addons.get(i.pk),
                )
                if len(choices) > 1:
                    self.fields['item_%s' % i.pk] = field
            else:
                if not i._subevent_quotas:
                    continue
                cached_availability = i.check_quotas(subevent=subevent,
                                                     _cache=quota_cache)
                field = forms.BooleanField(
                    label=self._label(event,
                                      i,
                                      cached_availability,
                                      override_price=item_price_override.get(
                                          i.pk)),
                    required=False,
                    initial=i.pk in current_addons,
                    help_text=rich_text(str(i.description)),
                )
                self.fields['item_%s' % i.pk] = field
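
The nesting here is the key trick: the queryset of the outer Prefetch carries its own prefetch_related, so items, their variations, and the variations' quotas load in a fixed number of batched queries instead of one per row. Distilled to a sketch with the same names:

from django.db.models import Prefetch

items = category.items.prefetch_related(
    Prefetch(
        'variations',
        to_attr='available_variations',
        queryset=ItemVariation.objects.filter(active=True).prefetch_related(
            # the inner Prefetch runs once per batch of variations,
            # not once per item
            Prefetch('quotas',
                     to_attr='_subevent_quotas',
                     queryset=event.quotas.filter(subevent=subevent)))))
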
Exemplo n.º 26
0
    def _needed_remote_artifacts(self, batch):
        """
        Build a list of only :class:`~pulpcore.plugin.models.RemoteArtifact` that need
        to be created for the batch.

        Args:
            batch (list): List of :class:`~pulpcore.plugin.stages.DeclarativeContent`.

        Returns:
            List: Of :class:`~pulpcore.plugin.models.RemoteArtifact`.
        """
        remotes_present = set()
        for d_content in batch:
            # If the attribute is set in a previous batch on the very first item in this batch, the
            # rest of the items in this batch will not get the attribute set during prefetch.
            # https://code.djangoproject.com/ticket/32089
            if hasattr(d_content.content, "_remote_artifact_saver_cas"):
                delattr(d_content.content, "_remote_artifact_saver_cas")

            for d_artifact in d_content.d_artifacts:
                if d_artifact.remote:
                    remotes_present.add(d_artifact.remote)

        prefetch_related_objects(
            [d_c.content for d_c in batch],
            Prefetch(
                "contentartifact_set",
                queryset=ContentArtifact.objects.prefetch_related(
                    Prefetch(
                        "remoteartifact_set",
                        queryset=RemoteArtifact.objects.filter(remote__in=remotes_present),
                        to_attr="_remote_artifact_saver_ras",
                    )
                ),
                to_attr="_remote_artifact_saver_cas",
            ),
        )

        # Now return the list of RemoteArtifacts that need to be saved.
        #
        # We can end up with duplicates (diff pks, same sha256) in the sequence below,
        # so we store by-sha256 and then return the final values
        needed_ras = {}  # { str(<sha256>): RemoteArtifact, ... }
        for d_content in batch:
            for d_artifact in d_content.d_artifacts:
                if not d_artifact.remote:
                    continue

                for content_artifact in d_content.content._remote_artifact_saver_cas:
                    if d_artifact.relative_path == content_artifact.relative_path:
                        break
                else:
                    msg = _('No declared artifact with relative path "{rp}" for content "{c}"')
                    raise ValueError(
                        msg.format(rp=d_artifact.relative_path, c=d_content.content)
                    )

                for remote_artifact in content_artifact._remote_artifact_saver_ras:
                    if remote_artifact.remote_id == d_artifact.remote.pk:
                        break
                else:
                    remote_artifact = self._create_remote_artifact(d_artifact, content_artifact)
                    needed_ras[remote_artifact.sha256] = remote_artifact

        return list(needed_ras.values())
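
Two points from this example generalize well: prefetch_related_objects() attaches prefetches to instances that are already in memory, and (per the linked ticket) it silently skips the whole batch when the first instance already carries the to_attr, so stale attributes from earlier batches must be cleared first. A minimal sketch with hypothetical Parent/Child models:

from django.db.models import Prefetch, prefetch_related_objects

def attach_children(batch):
    # https://code.djangoproject.com/ticket/32089: if the first instance
    # already has the attribute, the whole batch is skipped
    for obj in batch:
        if hasattr(obj, '_cached_children'):
            delattr(obj, '_cached_children')
    prefetch_related_objects(
        batch,
        Prefetch('children',
                 queryset=Child.objects.all(),
                 to_attr='_cached_children'),
    )
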
Exemplo n.º 27
0
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource,
                                 project__slug=slug,
                                 path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(
            prefix="strings" if extension == ".xml" else "",
            suffix=extension,
    ) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]),
                          timezone.now())
    entities_qs = (Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False).prefetch_related(
            Prefetch(
                "translation_set",
                queryset=Translation.objects.filter(locale=locale),
                to_attr="db_translations",
            )).prefetch_related(
                Prefetch(
                    "translation_set",
                    queryset=Translation.objects.filter(
                        locale=locale, approved_date__lte=timezone.now()),
                    to_attr="db_translations_approved_before_sync",
                )))
    entities_dict = {entity.key: entity for entity in entities_qs}

    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity,
                locale.code,
                vcs_translation,
                user,
                entity.db_translations,
                entity.db_translations_approved_before_sync,
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    changeset.bulk_log_actions()

    if changeset.changed_translations:
        # Update 'active' status of all changed translations and their siblings,
        # i.e. translations of the same entity to the same locale.
        changed_pks = {t.pk for t in changeset.changed_translations}
        (Entity.objects.filter(
            translation__pk__in=changed_pks).reset_active_translations(
                locale=locale))

        # Run checks and create TM entries for translations that pass them
        valid_translations = changeset.bulk_check_translations()
        changeset.bulk_create_translation_memory_entries(valid_translations)

    TranslatedResource.objects.get(resource=resource,
                                   locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    existing = ChangedEntityLocale.objects.values_list("entity",
                                                       "locale").distinct()
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity,
                                                        locale=t.locale)

    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()
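
Worth highlighting: the same reverse relation (translation_set) is prefetched twice with different filters, which only works because each Prefetch gets its own to_attr, producing two independent in-memory lists per entity. The shape of the pattern:

from django.db.models import Prefetch
from django.utils import timezone

entities = Entity.objects.prefetch_related(
    Prefetch('translation_set',
             queryset=Translation.objects.filter(locale=locale),
             to_attr='db_translations'),
    # same relation again, narrower filter, different attribute name
    Prefetch('translation_set',
             queryset=Translation.objects.filter(
                 locale=locale, approved_date__lte=timezone.now()),
             to_attr='db_translations_approved_before_sync'),
)
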
Exemplo n.º 28
0
def operator_vehicles(request, slug):
    operators = Operator.objects.select_related('region')
    try:
        operator = get_object_or_404(operators, slug=slug)
    except Http404:
        operator = get_object_or_404(operators, operatorcode__code=slug, operatorcode__source__name='slug')
    vehicles = operator.vehicle_set
    latest_journeys = Subquery(VehicleJourney.objects.filter(
        vehicle=OuterRef('pk')
    ).order_by('-datetime').values('pk')[:1])
    latest_journeys = vehicles.filter(latest_location=None).annotate(latest_journey=latest_journeys)
    latest_journeys = VehicleJourney.objects.filter(id__in=latest_journeys.values('latest_journey'))
    prefetch = Prefetch('vehiclejourney_set',
                        queryset=latest_journeys.select_related('service'), to_attr='latest_journeys')
    vehicles = vehicles.prefetch_related(prefetch)
    vehicles = vehicles.order_by('fleet_number', 'reg', 'code')
    vehicles = vehicles.select_related('vehicle_type', 'livery', 'latest_location__journey__service')

    edit = request.path.endswith('/edit')
    submitted = False
    if edit:
        form = EditVehiclesForm(request.POST, operator=operator, initial={
            'operator': operator
        })
        if request.POST and form.is_valid():
            ticked_vehicles = (vehicle for vehicle in vehicles if str(vehicle.id) in request.POST.getlist('vehicle'))
            data = {key: form.cleaned_data[key] for key in form.changed_data}
            submitted = len(VehicleEdit.objects.bulk_create(
                get_vehicle_edit(vehicle, data) for vehicle in ticked_vehicles
            ))
            if form.cleaned_data.get('operator') and form.cleaned_data['operator'] != operator:
                Vehicle.objects.filter(id__in=request.POST.getlist('vehicle')).update(
                    operator=form.cleaned_data['operator'])
    else:
        form = None
        pending_edits = VehicleEdit.objects.filter(approved=False, vehicle=OuterRef('id'))
        vehicles = vehicles.annotate(pending_edits=Exists(pending_edits))

    if not vehicles:
        raise Http404()

    if operator.name == 'National Express':
        for v in vehicles:
            parts = v.notes.split()
            if parts and parts[-1][-1].isdigit():
                v.fleet_number = parts[-1]
                if v.fleet_number.isdigit():
                    v.fleet_number = int(v.fleet_number)
                v.notes = ' '.join(parts[:-1])
        # two stable sorts: fleet number as the secondary key, notes as the primary
        vehicles = sorted(vehicles, key=lambda v: v.fleet_number if type(v.fleet_number) is int else 0)
        vehicles = sorted(vehicles, key=lambda v: v.notes or 'z')

    return render(request, 'operator_vehicles.html', {
        'breadcrumb': [operator.region, operator],
        'object': operator,
        'today': timezone.localtime().date(),
        'vehicles': vehicles,
        'code_column': any(v.fleet_number_mismatch() for v in vehicles),
        'branding_column': any(vehicle.branding for vehicle in vehicles),
        'name_column': any(vehicle.name for vehicle in vehicles),
        'notes_column': any(vehicle.notes for vehicle in vehicles),
        'edit_url': reverse('admin:vehicles_vehicle_changelist'),
        'edit': edit,
        'submitted': submitted,
        'form': form,
    })
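
The "latest journey per vehicle" part of this view is a reusable greatest-per-group pattern: a correlated Subquery picks each vehicle's newest journey pk, and a Prefetch with to_attr attaches those journeys in one extra query. Reduced to its core (dropping the latest_location filter used above):

from django.db.models import OuterRef, Prefetch, Subquery

latest_pk = Subquery(
    VehicleJourney.objects.filter(vehicle=OuterRef('pk'))
    .order_by('-datetime').values('pk')[:1])
with_latest = Vehicle.objects.annotate(latest_journey=latest_pk)
latest_journeys = VehicleJourney.objects.filter(
    id__in=with_latest.values('latest_journey'))
vehicles = Vehicle.objects.prefetch_related(
    Prefetch('vehiclejourney_set',
             queryset=latest_journeys.select_related('service'),
             to_attr='latest_journeys'))
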
Exemplo n.º 29
0
def index_enterprise_catalog_courses_in_algolia_task(
    content_keys,
    algolia_fields,
    uuid_batch_size=ALGOLIA_UUID_BATCH_SIZE,
):
    """
    Index course data in Algolia with enterprise-related fields.

    Arguments:
        content_keys (list): A list of content_keys.  It's important that this is the first positional argument,
            so that the passing of return values to the signature of the next chained celery task
            works as expected.
        algolia_fields (list): A list of course fields we want to index in Algolia
        uuid_batch_size (int): The threshold of distinct catalog/customer UUIDs associated with a piece of content,
            at which duplicate course records are created in the index,
            batching the uuids (flattened records) to reduce the payload size of the Algolia objects.
            Defaults to ``ALGOLIA_UUID_BATCH_SIZE``.
    """
    algolia_client = get_initialized_algolia_client()

    if not algolia_fields or not content_keys:
        logger.error(
            'Must provide algolia_fields and content_keys as arguments.')
        return

    # Update the index in batches
    for content_keys_batch in batch(content_keys, batch_size=TASK_BATCH_SIZE):
        courses = []
        catalog_uuids_by_course_key = defaultdict(set)
        customer_uuids_by_course_key = defaultdict(set)

        # retrieve ContentMetadata records that match the specified content_keys in the
        # content_key or parent_content_key. returns both courses and course runs.
        query = Q(content_key__in=content_keys_batch) | Q(
            parent_content_key__in=content_keys_batch)

        catalog_queries = CatalogQuery.objects.prefetch_related(
            'enterprise_catalogs', )
        content_metadata = ContentMetadata.objects.filter(
            query).prefetch_related(
                Prefetch('catalog_queries', queryset=catalog_queries), )

        # iterate through ContentMetadata records, retrieving the enterprise_catalog_uuids
        # and enterprise_customer_uuids associated with each ContentMetadata record (either
        # a course or a course run), storing them in a dictionary with the related course's
        # content_key as a key for later retrieval. the course's content_key is determined by
        # the content_key field if the metadata is a `COURSE` or by the parent_content_key
        # field if the metadata is a `COURSE_RUN`.
        for metadata in content_metadata:
            is_course_content_type = metadata.content_type == COURSE
            course_content_key = metadata.content_key if is_course_content_type else metadata.parent_content_key
            associated_queries = metadata.catalog_queries.all()
            enterprise_catalog_uuids = set()
            enterprise_customer_uuids = set()
            for query in associated_queries:
                associated_catalogs = query.enterprise_catalogs.all()
                for catalog in associated_catalogs:
                    enterprise_catalog_uuids.add(str(catalog.uuid))
                    enterprise_customer_uuids.add(str(catalog.enterprise_uuid))

            # add to any existing enterprise catalog uuids or enterprise customer uuids
            catalog_uuids_by_course_key[course_content_key].update(
                enterprise_catalog_uuids)
            customer_uuids_by_course_key[course_content_key].update(
                enterprise_customer_uuids)

        # iterate through only the courses, retrieving the enterprise-related uuids from the
        # dictionary created above. there are at least 2 duplicate course records per course,
        # each including the catalog uuids and customer uuids respectively.
        #
        # if the number of uuids for both catalogs/customers exceeds uuid_batch_size, then
        # create duplicate course records, batching the uuids (flattened records) to reduce
        # the payload size of the Algolia objects.
        course_content_metadata = content_metadata.filter(content_type=COURSE)
        for metadata in course_content_metadata:
            content_key = metadata.content_key
            # add enterprise-related uuids to json_metadata
            json_metadata = copy.deepcopy(metadata.json_metadata)
            json_metadata.update({
                'objectID':
                get_algolia_object_id(json_metadata.get('uuid')),
            })

            # enterprise catalog uuids
            catalog_uuids = sorted(
                list(catalog_uuids_by_course_key[content_key]))
            batched_metadata = _batched_metadata(
                json_metadata,
                catalog_uuids,
                'enterprise_catalog_uuids',
                '{}-catalog-uuids-{}',
                uuid_batch_size,
            )
            courses.extend(batched_metadata)

            # enterprise customer uuids
            customer_uuids = sorted(
                list(customer_uuids_by_course_key[content_key]))
            batched_metadata = _batched_metadata(
                json_metadata,
                customer_uuids,
                'enterprise_customer_uuids',
                '{}-customer-uuids-{}',
                uuid_batch_size,
            )
            courses.extend(batched_metadata)

        # extract out only the fields we care about and send to Algolia index
        algolia_objects = create_algolia_objects_from_courses(
            courses, algolia_fields)
        algolia_client.partially_update_index(algolia_objects)
Exemplo n.º 30
0
    def get_queryset(self):
        return Recipe.objects.prefetch_related(
            Prefetch('ingredients',
                     queryset=Ingredient.objects.select_related(
                         'food').order_by('rank')))
Exemplo n.º 31
0
def async_dupe_delete(*args, **kwargs):
    try:
        system_settings = System_Settings.objects.get()
        enabled = system_settings.delete_dupulicates  # (sic) the model field name is misspelled
        dupe_max = system_settings.max_dupes
        total_duplicate_delete_count_max_per_run = settings.DUPE_DELETE_MAX_PER_RUN
    except System_Settings.DoesNotExist:
        enabled = False

    if enabled and dupe_max is None:
        logger.info(
            'skipping deletion of excess duplicates: max_dupes not configured')
        return

    if enabled:
        logger.info(
            "delete excess duplicates (max_dupes per finding: %s, max deletes per run: %s)",
            dupe_max, total_duplicate_delete_count_max_per_run)
        deduplicationLogger.info(
            "delete excess duplicates (max_dupes per finding: %s, max deletes per run: %s)",
            dupe_max, total_duplicate_delete_count_max_per_run)

        # limit the batch to the configured max-per-run to prevent overlapping jobs
        results = Finding.objects \
                .filter(duplicate=True) \
                .order_by() \
                .values('duplicate_finding') \
                .annotate(num_dupes=Count('id')) \
                .filter(num_dupes__gt=dupe_max)[:total_duplicate_delete_count_max_per_run]

        originals_with_too_many_duplicates_ids = [
            result['duplicate_finding'] for result in results
        ]

        originals_with_too_many_duplicates = Finding.objects.filter(
            id__in=originals_with_too_many_duplicates_ids).order_by('id')

        # prefetch to make it faster
        originals_with_too_many_duplicates = originals_with_too_many_duplicates.prefetch_related(
            (Prefetch("original_finding",
                      queryset=Finding.objects.filter(
                          duplicate=True).order_by('date'))))

        total_deleted_count = 0
        for original in originals_with_too_many_duplicates:
            duplicate_list = original.original_finding.all()
            dupe_count = len(duplicate_list) - dupe_max

            for finding in duplicate_list:
                deduplicationLogger.debug(
                    'deleting finding {}:{} ({})'.format(
                        finding.id, finding.title, finding.hash_code))
                finding.delete()
                total_deleted_count += 1
                dupe_count -= 1
                if dupe_count <= 0:
                    break
                if total_deleted_count >= total_duplicate_delete_count_max_per_run:
                    break

            if total_deleted_count >= total_duplicate_delete_count_max_per_run:
                break

        logger.info('total number of excess duplicates deleted: %s',
                    total_deleted_count)
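
The prefetch here exists purely so that original.original_finding.all() in the loop is served from cache, already filtered to duplicates and sorted oldest-first for deletion. The essential shape, with the surrounding task logic stripped away:

from django.db.models import Prefetch

originals = Finding.objects.filter(
    id__in=originals_with_too_many_duplicates_ids).prefetch_related(
        Prefetch('original_finding',
                 queryset=Finding.objects.filter(
                     duplicate=True).order_by('date')))
for original in originals:
    # .all() hits the prefetch cache; re-filtering here would query again
    oldest_first = original.original_finding.all()
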