Ejemplo n.º 1
0
class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """
    Read-only endpoints for users.

    Can specify `me` instead of the `username` to retrieve details on the current user.
    """

    queryset = User.objects.all().prefetch_related(
        Prefetch("booking_groups",
                 Group.objects.filter(memberships__accepted=True)))
    permission_classes = [IsAuthenticated]
    serializer_class = UserSerializer
    lookup_field = "username"
    filter_backends = [DjangoFilterBackend]
    filterset_fields = ["username", "first_name", "last_name"]

    def get_object(self):
        """Resolve the lookup value, treating `me` as the requesting user."""
        url_kwarg = self.lookup_url_kwarg or self.lookup_field
        if self.kwargs[url_kwarg] == "me":
            return self.request.user
        return super().get_object()

    def get_queryset(self):
        """All users, with memberships narrowed to the requester's groups."""
        if not self.request.user.is_authenticated:
            return User.objects.none()

        accepted_memberships = GroupMembership.objects.filter(
            group__in=self.request.user.booking_groups.all(),
            accepted=True)
        return User.objects.all().prefetch_related(
            Prefetch("memberships", accepted_memberships))

    @action(detail=True, methods=["get"])
    def invites(self, request, username=None):
        """
        Retrieve all invites for a given user.
        """
        lookup = request.user.username if username == "me" else username
        user = get_object_or_404(User, username=lookup)

        # Only pending (not yet accepted) memberships in groups the
        # requesting user belongs to are visible.
        pending = GroupMembership.objects.filter(
            user=user,
            accepted=False,
            group__in=self.request.user.booking_groups.all(),
        )
        return Response(GroupMembershipSerializer(pending, many=True).data)

    @action(detail=True, methods=["post"])
    def activate(self, request, username=None):
        """
        Activate a user's account. Must be run when a user signs in for the first time, at least.
        The action is idempotent, so no harm in calling it multiple times.
        """
        lookup = request.user.username if username == "me" else username
        user = get_object_or_404(User, username=lookup)
        if user != request.user:
            return HttpResponseForbidden()

        # Ensure that all invites for this user, even ones created before
        # their account was in the DB, are associated with the User object.
        GroupMembership.objects.filter(username=user.username).update(
            user=user)
        return Response({"success": True})
Ejemplo n.º 2
0
def create_notification(event=None, *args, **kwargs):
    """
    Create notification records for `event` and hand them to `process_notifications`.

    If `kwargs['recipients']` is given, only those users' personal
    notifications are processed.  Otherwise a system notification is sent
    (user=None), followed by personal notifications for admins/superusers and
    for every user with an applicable notification record (global, or for the
    product derived from the event's kwargs).
    """
    if 'recipients' in kwargs:
        # mimic existing code so that when recipients is specified, no other system or personal notifications are sent.
        logger.debug('creating notifications for recipients')
        for recipient_notifications in Notifications.objects.filter(user__username__in=kwargs['recipients'], user__is_active=True, product=None):
            process_notifications(event, recipient_notifications, *args, **kwargs)
        return

    logger.debug('creating system notifications for event: %s', event)
    # send system notifications to all admin users

    # System notifications
    try:
        system_notifications = Notifications.objects.get(user=None)
    except Notifications.DoesNotExist:
        # No system-wide record configured: fall back to an unsaved default.
        # (The original caught bare Exception here, which also hid
        # MultipleObjectsReturned and genuine database errors.)
        system_notifications = Notifications()

    # System notifications are sent one with user=None, which will trigger email to configured system email, to global slack channel, etc.
    process_notifications(event, system_notifications, *args, **kwargs)

    # All admins will also receive system notifications, but as part of the person global notifications section below
    # This time user is set, so will trigger email to personal email, to personal slack channel (mention), etc.
    # only retrieve users which have at least one notification type enabled for this event type.
    logger.debug('creating personal notifications for event: %s', event)

    # Resolve the product from whichever related object was passed in,
    # most specific source first.
    product = kwargs.get('product')
    if not product and 'engagement' in kwargs:
        product = kwargs['engagement'].product
    if not product and 'test' in kwargs:
        product = kwargs['test'].engagement.product
    if not product and 'finding' in kwargs:
        product = kwargs['finding'].test.engagement.product

    # get users with either global notifications, or a product specific notification
    # and all admin/superusers, they will always be notified
    users = Dojo_User.objects.filter(is_active=True).prefetch_related(Prefetch(
        "notifications_set",
        queryset=Notifications.objects.filter(Q(product_id=product) | Q(product__isnull=True)),
        to_attr="applicable_notifications"
    )).annotate(applicable_notifications_count=Count('notifications__id', filter=Q(notifications__product_id=product) | Q(notifications__product__isnull=True)))\
        .filter((Q(applicable_notifications_count__gt=0) | Q(is_superuser=True) | Q(is_staff=True)))

    # only send to authorized users or admin/superusers
    if product:
        users = users.filter(Q(id__in=product.authorized_users.all()) | Q(id__in=product.prod_type.authorized_users.all()) | Q(is_superuser=True) | Q(is_staff=True))

    for user in users:
        # send notifications to user after merging possible multiple notifications records (i.e. personal global + personal product)
        applicable_notifications = user.applicable_notifications
        if user.is_staff or user.is_superuser:
            # admin users get all system notifications
            applicable_notifications.append(system_notifications)

        notifications_set = Notifications.merge_notifications_list(applicable_notifications)
        notifications_set.user = user
        process_notifications(event, notifications_set, *args, **kwargs)
Ejemplo n.º 3
0
 def get_queryset(self):
     """Return the detail queryset with its fields prefetched in `order`."""
     # Zero-argument super() (Python 3) replaces the redundant
     # super(ContextFormDetail, self) spelling.
     qs = super().get_queryset()
     field_qs = Fieldidable.objects.order_by('order')
     return qs.prefetch_related(Prefetch('fields', queryset=field_qs))
Ejemplo n.º 4
0
 def with_stats(self):
     """Annotate each row with its institutions (point + monitoring data)
     prefetched under the ``stats`` attribute."""
     stats_qs = MonitoringInstitution.objects.with_point().with_monitoring()
     prefetch = Prefetch('monitoringinstitution_set',
                         queryset=stats_qs,
                         to_attr='stats')
     return self.prefetch_related(prefetch)
Ejemplo n.º 5
0
def get_cart_and_items(request):
    """Return the requesting user's Cart with its items prefetched."""
    return Cart.objects.prefetch_related(
        Prefetch('items')).get(user=request.user)
Ejemplo n.º 6
0
class CompanyViewSet(ArchivableViewSetMixin, CoreViewSet):
    """Company view set."""

    serializer_class = CompanySerializer
    required_scopes = (Scope.internal_front_end, )
    unarchive_validators = (NotATransferredCompanyValidator(), )
    filter_backends = (DjangoFilterBackend, OrderingFilter)
    filterset_fields = ('global_headquarters_id',
                        'global_ultimate_duns_number')
    ordering_fields = ('name', 'created_on')
    queryset = Company.objects.select_related(
        'address_country',
        'archived_by',
        'business_type',
        'employee_range',
        'export_experience_category',
        'global_headquarters__one_list_account_owner__dit_team__country',
        'global_headquarters__one_list_account_owner__dit_team__uk_region',
        'global_headquarters__one_list_account_owner__dit_team',
        'global_headquarters__one_list_account_owner',
        'global_headquarters__one_list_tier',
        'global_headquarters',
        'headquarter_type',
        'one_list_account_owner__dit_team__country',
        'one_list_account_owner__dit_team__uk_region',
        'one_list_account_owner__dit_team',
        'one_list_account_owner',
        'one_list_tier',
        'registered_address_country',
        'transferred_to',
        'turnover_range',
        'uk_region',
    ).prefetch_related(
        Prefetch('contacts', queryset=get_contact_queryset()),
        Prefetch('investor_investment_projects',
                 queryset=get_slim_investment_project_queryset()),
        'export_to_countries',
        'future_interest_countries',
        'sector__parent__parent',
        'sector__parent',
        'sector',
        Prefetch('export_countries', queryset=get_export_country_queryset()),
    )

    def _validate_and_save(self, serializer_class, request, partial=False):
        """
        Validate `request.data` against `serializer_class` for the current
        object, save with the requesting user and return an empty 204 response.

        Shared implementation for the company actions below, which differ only
        in the serializer used and whether the update is partial.
        """
        # Only pass `partial` through when set, so serializers that do not
        # expect the kwarg keep working exactly as before.
        extra_kwargs = {'partial': True} if partial else {}
        serializer = serializer_class(instance=self.get_object(),
                                      data=request.data,
                                      **extra_kwargs)
        serializer.is_valid(raise_exception=True)
        serializer.save(request.user)
        return Response(None, status=status.HTTP_204_NO_CONTENT)

    @action(
        methods=['post'],
        detail=True,
        permission_classes=[
            HasPermissions(
                f'company.{CompanyPermission.change_company}',
                f'company.{CompanyPermission.change_regional_account_manager}',
            ),
        ],
        schema=StubSchema(),
    )
    def self_assign_account_manager(self, request, *args, **kwargs):
        """
        Sets the company to be an international trade adviser-managed One List company, and
        assigns the authenticated user as the account manager.

        This means:

        - setting the One List tier to 'Tier D - Interaction Trade Adviser Accounts' (using the
        tier ID, not the name)
        - setting the authenticated user as the One List account manager (overwriting the
        existing value)

        The operation is not allowed if:

        - the company is a subsidiary of a One List company
        - the company is already a One List company on a different tier (i.e. not 'Tier D -
        Interaction Trade Adviser Accounts')
        """
        return self._validate_and_save(SelfAssignAccountManagerSerializer,
                                       request)

    @action(
        methods=['post'],
        detail=True,
        permission_classes=[
            HasPermissions(
                f'company.{CompanyPermission.change_company}',
                f'company.{CompanyPermission.change_regional_account_manager}',
            ),
        ],
        schema=StubSchema(),
    )
    def remove_account_manager(self, request, *args, **kwargs):
        """
        Remove the One List account manager and tier from a company if it is an international
        trade adviser-managed One List company.

        The operation is not allowed if the company is a One List company that isn't on
        'Tier D - Interaction Trade Adviser Accounts'.
        """
        return self._validate_and_save(RemoveAccountManagerSerializer, request)

    @action(
        methods=['patch'],
        permission_classes=[
            HasPermissions(
                f'company.{CompanyPermission.change_company}',
                f'company.change_companyexportcountry',
            ),
        ],
        detail=True,
    )
    def update_export_detail(self, request, *args, **kwargs):
        """
        Update export related information for the company.
        """
        return self._validate_and_save(UpdateExportDetailsSerializer,
                                       request,
                                       partial=True)
Ejemplo n.º 7
0
def profile(request):
    """
    Demo profile view.

    Shows several ways of reaching the Customer linked to ``request.user`` and
    of avoiding extra queries with ``select_related`` / ``prefetch_related``,
    then renders either the read-only profile page or the edit-profile form.
    """

    # Option 1: look the Customer up by name (separate query).
    customer1 = Customer.objects.get(name=request.user.UserName)
    # Option 2: just the primary key — usable directly in a foreign-key field.
    customer2 = request.user.pk
    # Option 3: one-to-one access — request.user (a SimpleLazyObject) exposes
    # the related Customer object directly.
    customer = request.user.customer
    # print(request.user.pk)
    # print(type(request.user))
    print(customer)
    # select_related joins the related row up front, so accessing the related
    # object afterwards does not hit the database again.
    # Returns an Order object; the related Customer is accessible without a query.
    i = Orders.objects.select_related("OrderedBy").first()
    # print(i.Item)
    # print(Orders.objects.all())
    # Returns an Order object; the related Product is accessible without a query.
    i2 = Orders.objects.select_related("Item").last()
    # print(i2.OrderedBy)
    # print(type(i))
    # print(i)
    # print(i2)
    # prefetch_related: the customer's orders are cached, so the
    # .orders_set.all() below does not hit the database again.
    p1 = Customer.objects.prefetch_related("orders_set").first()
    print(p1.orders_set.all())
    # Prefetch() lets us filter the prefetched orders instead of caching all().
    p2 = Customer.objects.prefetch_related(
        Prefetch("orders_set", Orders.objects.filter(Item__Price__range=(2000, 5000))))
    for i in p2:
        print(i.orders_set.all())

    # Orders belonging to the current customer (no select_related here, so the
    # l.Item / l.OrderedBy accesses below each hit the database).
    o = Orders.objects.filter(OrderedBy=customer)
    # print(Orders._meta.get_all_field_names())
    print(Orders._meta.get_fields())
    for l in o:
        print(l.Item.Price)
        print(l.Item.__class__)
        print(l.OrderedBy.Phone)
    # o1 = Orders.objects.get(OrderedBy=request.user)
    print(o)
    # print(o1)
    print(request.user)
    print(request.user.customer.Phone)
    form = customerProfile(request.POST or None, instance=customer)
    if("profile" in request.get_full_path()):
        # URL contains "profile": render the read-only profile page.
        CusObj = Customer.objects.get(Profile=request.user.pk)
        context = {"customer": CusObj}
        template = "MyProfile.html"
        return render(request, template, context)
    else:
        # Otherwise render (and on POST, process) the edit-profile form.
        if request.method == "POST":
            form = customerProfile(request.POST or None,
                                   request.FILES or None, instance=customer)
            if form.is_valid():
                print("Came IN")
                form.save()
                return HttpResponseRedirect("/profile/")

        template = "editProfile.html"
        context = {"form": form, "customer": customer}
        return render(request, template, context)
Ejemplo n.º 8
0
    def __init__(self, *args, **kwargs):
        """
        Build one form field per item in *category* (add-on selection).

        Takes additional keyword arguments:

        :param category: The category to choose from
        :param event: The event this belongs to
        :param initial: The current set of add-ons
        :param quota_cache: A shared dictionary for quota caching
        :param item_cache: A shared dictionary for item/category caching
        """
        # Pop our custom kwargs before delegating to the parent form.
        category = kwargs.pop('category')
        event = kwargs.pop('event')
        current_addons = kwargs.pop('initial')
        quota_cache = kwargs.pop('quota_cache')
        item_cache = kwargs.pop('item_cache')

        super().__init__(*args, **kwargs)

        if category.pk not in item_cache:
            # Get all items to possibly show: active, within their
            # availability window, not voucher-only, and with at least one
            # quota (quotac filter below).
            items = category.items.filter(
                Q(active=True)
                & Q(
                    Q(available_from__isnull=True)
                    | Q(available_from__lte=now()))
                & Q(
                    Q(available_until__isnull=True)
                    | Q(available_until__gte=now()))
                & Q(hide_without_voucher=False)).prefetch_related(
                    'variations__quotas',  # for .availability()
                    Prefetch('quotas', queryset=event.quotas.all()),
                    Prefetch('variations',
                             to_attr='available_variations',
                             queryset=ItemVariation.objects.filter(
                                 active=True,
                                 quotas__isnull=False).distinct()),
                ).annotate(quotac=Count('quotas'),
                           has_variations=Count('variations')).filter(
                               quotac__gt=0).order_by('category__position',
                                                      'category_id',
                                                      'position', 'name')
            # Share the result with other forms for the same category.
            item_cache[category.pk] = items
        else:
            items = item_cache[category.pk]

        for i in items:
            if i.has_variations:
                # Items with variations get a choice field; the empty first
                # entry represents "no selection".
                choices = [('', _('no selection'), '')]
                for v in i.available_variations:
                    # quota_cache is shared across forms to avoid re-checking
                    # the same quotas repeatedly.
                    cached_availability = v.check_quotas(_cache=quota_cache)
                    choices.append(
                        (v.pk, self._label(event, v, cached_availability),
                         v.description))

                field = AddOnVariationField(
                    choices=choices,
                    label=i.name,
                    required=False,
                    widget=AddOnVariationSelect,
                    help_text=rich_text(str(i.description)),
                    initial=current_addons.get(i.pk),
                )
            else:
                # Simple items become a checkbox, pre-checked when the item is
                # already among the current add-ons.
                cached_availability = i.check_quotas(_cache=quota_cache)
                field = forms.BooleanField(
                    label=self._label(event, i, cached_availability),
                    required=False,
                    initial=i.pk in current_addons,
                    help_text=rich_text(str(i.description)),
                )

            self.fields['item_%s' % i.pk] = field
Ejemplo n.º 9
0
class ClassRoomDetail(generics.RetrieveAPIView):
    """Retrieve a single classroom with its course, attendees and events
    preloaded to avoid per-object queries during serialization."""

    serializer_class = ClassRoomSerializer
    queryset = ClassRoom.objects.select_related('course').prefetch_related(
        'attending',
        Prefetch('events',
                 queryset=Event.objects.select_related('to_phase', 'user')),
    ).all()
Ejemplo n.º 10
0
    def get(self, request, version="v1", format=None):
        """
        Return the logged-in doctor's profile together with the count of
        accepted (status=2) appointments.

        Raises:
            Exception: if no Doctor row is linked to the requesting user.
        """
        # NOTE: the original wrapped this in
        # ``try: ... except Doctor.DoesNotExist``, but ``filter()`` never
        # raises DoesNotExist (only ``get()`` does) — an empty result crashed
        # with IndexError at ``doctor_profile[0]`` instead.  Check the empty
        # case explicitly.
        doctor_profile = Doctor.objects.filter(
            user=request.user).prefetch_related(
                Prefetch('availability',
                         queryset=DoctorHospital.objects.distinct(
                             'hospital_id').order_by('hospital_id', 'fees')))
        doctor = doctor_profile.first()
        if doctor is None:
            raise Exception('No doctor linked with the logged in User')

        # count all upcoming/accepted appointments
        doctor_appointments = OpdAppointment.objects.filter(
            doctor=doctor, status=2).count()

        serialized_doctor = DoctorProfileSerializer(doctor)

        return Response({
            "profile": serialized_doctor.data,
            "doctor_appointments": doctor_appointments
        })
Ejemplo n.º 11
0
    def handle(self, *args, **options):
        """
        Export exercises and/or submissions of the given course instances to
        CSV files, depending on which output-file options were supplied.
        """
        course_instance_ids = options['course_instance_id']

        exercise_file_path = options['exercise_output_file']
        submission_file_path = options['submission_output_file']
        # Without any output file there is nothing to do; exit code 2 signals
        # a command-line usage error.
        if not exercise_file_path and not submission_file_path:
            self.stderr.write(
                'At least one of the options "--exercise-output-file" or '
                '"--submission-output-file" must be given in order to output anything.'
            )
            self.stderr.write('Aborting...')
            sys.exit(2)

        limit_submissions_start = options['limit_submissions_start']
        limit_submissions_end = options['limit_submissions_end']
        if limit_submissions_start is not None and limit_submissions_start < 0:
            self.stderr.write(
                "--limit-submissions-start must be a non-negative integer.")
            sys.exit(2)
        if limit_submissions_end is not None and limit_submissions_end < 0:
            self.stderr.write(
                "--limit-submissions-end must be a non-negative integer.")
            sys.exit(2)
        # slice(None, None) selects everything when no limits were given.
        submissions_slice = slice(limit_submissions_start,
                                  limit_submissions_end)

        # Fetch all exercises from the given course instances.
        exercises = BaseExercise.objects.filter(
            course_module__course_instance__pk__in=course_instance_ids,
        ).select_related('category', ).prefetch_related(
            Prefetch(
                'course_module',
                queryset=CourseModule.objects.select_related(
                    'course_instance', ).only(
                        'order',
                        'points_to_pass',
                        'closing_time',
                        'late_submissions_allowed',
                        'late_submission_deadline',
                        'late_submission_penalty',
                        # NOTE(review): these two paths are resolved relative
                        # to CourseModule, so the 'course_module__' prefix
                        # looks wrong — presumably
                        # 'course_instance__instance_name' and
                        # 'course_instance__url' were intended; confirm
                        # against the models.
                        'course_module__course_instance__instance_name',
                        'course_module__course_instance__url',
                    ),
            ), ).defer(
                'description',
                'service_url',
                'exercise_info',
                'model_answers',
                'templates',
                'content',
                'category__status',
                'category__description',
                'category__course_instance',
            ).order_by('id', )

        # Fetch all submissions for the exercises.
        if submission_file_path:
            submissions = Submission.objects.filter(
                exercise__in=exercises, ).prefetch_related(
                    Prefetch(
                        'submitters',
                        queryset=UserProfile.objects.select_related(
                            'user').only('user__id'),
                        to_attr='submitter_userprofiles',
                    ), ).defer(
                        'hash',
                        'grader',
                        'feedback',
                        'assistant_feedback',
                        'submission_data',
                        'grading_data',
                        'meta_data',
                    ).order_by('id', )[submissions_slice]

            # Fetch all deadline deviations in the course instances.
            all_deadline_deviations_queryset = DeadlineRuleDeviation.objects.filter(
                exercise__course_module__course_instance__id__in=
                course_instance_ids, ).prefetch_related(
                    Prefetch(
                        'exercise',
                        queryset=BaseExercise.objects.select_related(
                            'course_module', ).only(
                                'id',
                                'course_module__id',
                                'course_module__closing_time',
                                'course_module__course_instance__id',
                                'course_module__course_instance__course__id',
                            ),
                    ),
                    Prefetch(
                        'submitter',
                        queryset=UserProfile.objects.select_related(
                            'user').only('user__id'),
                    ),
                ).only(
                    'exercise__id',
                    'exercise__course_module__closing_time',
                    'exercise__course_module__course_instance__id',
                    'submitter__user__id',
                    'extra_minutes',
                )

            # Map: exercise id -> {user id -> extended deadline}.
            all_deadline_deviations = {}
            for dl_dev in all_deadline_deviations_queryset:
                all_deadline_deviations.setdefault(
                    dl_dev.exercise.id,
                    {})[dl_dev.submitter.user.id] = dl_dev.get_new_deadline()

            # Fetch all max submissions deviations in the course instances.
            all_max_submissions_deviations_queryset = MaxSubmissionsRuleDeviation.objects.filter(
                exercise__course_module__course_instance__id__in=
                course_instance_ids, ).select_related(
                    'exercise', ).prefetch_related(
                        Prefetch(
                            'submitter',
                            queryset=UserProfile.objects.select_related(
                                'user').only('user__id'),
                        ), ).only(
                            'exercise__id',
                            'exercise__max_submissions',
                            'submitter__user__id',
                            'extra_submissions',
                        )

            # Map: exercise id -> {user id -> effective submission limit}.
            all_max_submissions_deviations = {}
            for sbms_dev in all_max_submissions_deviations_queryset:
                all_max_submissions_deviations.setdefault(
                    sbms_dev.exercise.id,
                    {},
                )[sbms_dev.submitter.user.
                  id] = sbms_dev.exercise.max_submissions + sbms_dev.extra_submissions

        # Create the CSV output files.
        # One CSV file for all exercises.
        if exercise_file_path:
            self.write_exercise_csv(exercise_file_path, exercises)
            self.stdout.write("Created the exercise file: " +
                              exercise_file_path)

        # One CSV file for all submissions.
        if submission_file_path:
            self.write_submission_csv(
                submission_file_path,
                submissions,
                all_deadline_deviations,
                all_max_submissions_deviations,
            )
            self.stdout.write("Created the submission file: " +
                              submission_file_path)
Ejemplo n.º 12
0
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :param str slug: Project slug.
    :param str code: Locale code.
    :param str part: Resource path or Subpage name.
    :param UploadedFile f: UploadedFile instance.
    :param User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs_models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        Translation,
        update_stats,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code__iexact=code)
    resource = get_object_or_404(Resource,
                                 project__slug=slug,
                                 path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(suffix=extension) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(project, VCSProject(project, locales=[locale]),
                          timezone.now())
    entities_qs = Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False).prefetch_related(
            Prefetch('translation_set',
                     queryset=Translation.objects.filter(locale=locale),
                     to_attr='db_translations')).prefetch_related(
                         Prefetch('translation_set',
                                  queryset=Translation.objects.filter(
                                      locale=locale,
                                      approved_date__lte=timezone.now()),
                                  to_attr='old_translations'))
    entities_dict = {entity.key: entity for entity in entities_qs}

    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity, locale.code, vcs_translation, user,
                entity.db_translations, entity.old_translations)

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()
    update_stats(resource, locale)

    # Mark translations as changed
    changed_entities = {}
    # Materialize the existing (entity, locale) pairs into a set: an `in`
    # test against the QuerySet was a linear scan per lookup (originally
    # `if not key in existing`); a set gives O(1) membership.
    existing = set(
        ChangedEntityLocale.objects.values_list('entity', 'locale').distinct())
    for t in changeset.translations_to_create + changeset.translations_to_update:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity,
                                                        locale=t.locale)

    ChangedEntityLocale.objects.bulk_create(changed_entities.values())
Ejemplo n.º 13
0
def get_grouped_items(event, subevent=None, voucher=None):
    """
    Build the list of purchasable items for an event (optionally scoped to a
    subevent and/or restricted by a voucher), with per-item/variation quota
    availability, maximum order quantities and display prices attached.

    :param event: The event whose items should be listed.
    :param subevent: Optional subevent used to scope quotas and price overrides.
    :param voucher: Optional voucher; may restrict the item/variation set,
        override prices, and bypass quota checks.
    :return: Tuple ``(items, display_add_to_cart)`` where ``items`` is the
        filtered list of item objects (each enriched with attributes such as
        ``order_max``, ``display_price``, ``available_variations``) and
        ``display_add_to_cart`` is True if at least one item/variation can
        currently be ordered.
    """
    # Base set: active items currently inside their availability window and
    # not belonging to an add-on-only category.
    items = event.items.all().filter(
        Q(active=True)
        & Q(Q(available_from__isnull=True) | Q(available_from__lte=now()))
        & Q(Q(available_until__isnull=True) | Q(available_until__gte=now()))
        & Q(Q(category__isnull=True) | Q(category__is_addon=False)))

    # Items normally hidden without a voucher become visible when the voucher
    # targets them directly; a quota-bound voucher narrows to that quota.
    vouchq = Q(hide_without_voucher=False)
    if voucher:
        if voucher.item_id:
            vouchq |= Q(pk=voucher.item_id)
            items = items.filter(pk=voucher.item_id)
        elif voucher.quota_id:
            items = items.filter(quotas__in=[voucher.quota_id])

    # Prefetch subevent-scoped quotas onto items and their active variations
    # (as `_subevent_quotas`), keep only items that have at least one quota,
    # and order for category-grouped display.
    items = items.filter(vouchq).select_related(
        'category',
        'tax_rule',  # for re-grouping
    ).prefetch_related(
        Prefetch('quotas',
                 to_attr='_subevent_quotas',
                 queryset=event.quotas.filter(subevent=subevent)),
        Prefetch('variations',
                 to_attr='available_variations',
                 queryset=ItemVariation.objects.filter(
                     active=True, quotas__isnull=False).prefetch_related(
                         Prefetch('quotas',
                                  to_attr='_subevent_quotas',
                                  queryset=event.quotas.filter(
                                      subevent=subevent))).distinct()),
    ).annotate(quotac=Count('quotas'),
               has_variations=Count('variations')).filter(
                   quotac__gt=0).order_by('category__position', 'category_id',
                                          'position', 'name')
    display_add_to_cart = False
    # Quota availability results are shared across items via `quota_cache`;
    # a pre-warmed copy may come from the event-level cache.
    external_quota_cache = event.cache.get('item_quota_cache')
    quota_cache = external_quota_cache or {}

    if subevent:
        item_price_override = subevent.item_price_overrides
        var_price_override = subevent.var_price_overrides
    else:
        item_price_override = {}
        var_price_override = {}

    for item in items:
        if voucher and voucher.item_id and voucher.variation_id:
            # Restrict variations if the voucher only allows one
            item.available_variations = [
                v for v in item.available_variations
                if v.pk == voucher.variation_id
            ]

        # Event-wide default applies when the item sets no per-item limit.
        max_per_order = item.max_per_order or int(
            event.settings.max_items_per_order)

        if not item.has_variations:
            # No quota for this subevent -> the item cannot be sold here.
            item._remove = not bool(item._subevent_quotas)

            if voucher and (voucher.allow_ignore_quota or voucher.block_quota):
                # Voucher bypasses the quota check; remaining voucher uses
                # become the effective availability.
                item.cached_availability = (Quota.AVAILABILITY_OK,
                                            voucher.max_usages -
                                            voucher.redeemed)
            else:
                item.cached_availability = list(
                    item.check_quotas(subevent=subevent, _cache=quota_cache))

            # None means "unlimited" -> treat as effectively infinite.
            item.order_max = min(
                item.cached_availability[1] if item.cached_availability[1]
                is not None else sys.maxsize, max_per_order)

            price = item_price_override.get(item.pk, item.default_price)
            if voucher:
                price = voucher.calculate_price(price)
            item.display_price = item.tax(price)

            display_add_to_cart = display_add_to_cart or item.order_max > 0
        else:
            # Same availability/pricing computation, per variation.
            for var in item.available_variations:
                if voucher and (voucher.allow_ignore_quota
                                or voucher.block_quota):
                    var.cached_availability = (Quota.AVAILABILITY_OK,
                                               voucher.max_usages -
                                               voucher.redeemed)
                else:
                    var.cached_availability = list(
                        var.check_quotas(subevent=subevent,
                                         _cache=quota_cache))

                var.order_max = min(
                    var.cached_availability[1] if var.cached_availability[1]
                    is not None else sys.maxsize, max_per_order)

                price = var_price_override.get(var.pk, var.price)
                if voucher:
                    price = voucher.calculate_price(price)
                var.display_price = var.tax(price)

                display_add_to_cart = display_add_to_cart or var.order_max > 0

            # Drop variations without a quota for this subevent.
            item.available_variations = [
                v for v in item.available_variations if v._subevent_quotas
            ]
            if voucher and voucher.variation_id:
                item.available_variations = [
                    v for v in item.available_variations
                    if v.pk == voucher.variation_id
                ]

            # Price range shown in listings (net or gross per event settings).
            if len(item.available_variations) > 0:
                item.min_price = min([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])
                item.max_price = max([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])

            item._remove = not bool(item.available_variations)

    # Share freshly computed quota results with subsequent requests
    # (short TTL since availability changes quickly).
    if not external_quota_cache:
        event.cache.set('item_quota_cache', quota_cache, 5)
    items = [
        item for item in items
        if (len(item.available_variations) > 0 or not item.has_variations)
        and not item._remove
    ]
    return items, display_add_to_cart
Ejemplo n.º 14
0
    def resolve_bills(
        self,
        info,
        before=None,
        after=None,
        first=None,
        last=None,
        jurisdiction=None,
        chamber=None,
        session=None,
        updated_since=None,
        classification=None,
        subject=None,
        sponsor=None,
        action_since=None,
        search_query=None,
    ):
        """
        Resolve the `bills` GraphQL connection.

        Filters bills by jurisdiction, chamber, session, classification,
        subject, sponsor, update/action timestamps and full-text search,
        then prefetches the relations requested in the GraphQL selection
        set via `optimize` to avoid N+1 queries.

        :return: A filtered, prefetch-optimized Bill queryset.
        """
        # q (full text)
        bills = Bill.objects.all()

        if jurisdiction:
            bills = bills.filter(**jurisdiction_query(jurisdiction))
        # search_bills expects a list of subjects; wrap the single value.
        subjects = [subject] if subject else []
        bills = search_bills(
            bills=bills,
            query=search_query,
            chamber=chamber,
            session=session,
            classification=classification,
            subjects=subjects,
            sort="-updated",
        )
        if updated_since:
            bills = bills.filter(updated_at__gte=updated_since)
        if action_since:
            bills = bills.filter(latest_action_date__gte=action_since)
        if sponsor:
            sponsor_args = {}
            if "primary" in sponsor:
                sponsor_args["sponsorships__primary"] = sponsor["primary"]
            # A resolved person id is more precise than a name match, so it
            # takes precedence when both are supplied.
            if sponsor.get("person"):
                sponsor_args["sponsorships__person_id"] = sponsor["person"]
            elif sponsor.get("name"):
                sponsor_args["sponsorships__name"] = sponsor["name"]
            bills = bills.filter(**sponsor_args)

        bills = optimize(
            bills,
            info,
            [
                ".abstracts",
                ".otherTitles",
                ".otherIdentifiers",
                ".actions",
                ".actions.organization",
                ".actions.relatedEntities",
                ".actions.relatedEntities.organization",
                ".actions.relatedEntities.person",
                ".sponsorships",
                ".documents",
                ".versions",
                ".documents.links",
                ".versions.links",
                ".sources",
                ".relatedBills",
                ".votes",
                ".votes.counts",
                (
                    ".votes.votes",
                    Prefetch("votes__votes",
                             PersonVote.objects.all().select_related("voter")),
                ),
            ],
            # Bug fix: these were a single implicitly-concatenated string
            # (".legislativeSession.legislativeSession.jurisdiction") due to
            # a missing comma; they must be two separate selection paths.
            [
                ".legislativeSession",
                ".legislativeSession.jurisdiction",
            ],
        )

        return bills
Ejemplo n.º 15
0
def update_list_settings(account: Account, user_input: Dict) -> Dict[str, Any]:
    """
    Update the settings of an existing url list owned by `account`.

    This cannot update the urls, as that would increase complexity too much.

    :param account:
    :param user_input: {
        'id': int,
        'name': str,
        'enable_scans': bool,
        'scan_type': str,

        # todo: Who should set this? Should this be set by admins? How can we avoid permission hell?
        # Probably as long as the settings are not too detailed / too frequently.
        'automated_scan_frequency': str,
    }
    :return: An operation_response dict; on success its `data` contains the
        serialized, validated list settings plus derived info about the last
        scan, last report and any list warnings.
    """

    expected_keys = [
        'id', 'name', 'enable_scans', 'scan_type', 'automated_scan_frequency',
        'scheduled_next_scan'
    ]
    if not keys_are_present_in_object(expected_keys, user_input):
        return operation_response(error=True, message="Missing settings.")

    # Latest scan (with its related scan record) ends up in `last_scan[0]`.
    prefetch_last_scan = Prefetch(
        'accountinternetnlscan_set',
        queryset=AccountInternetNLScan.objects.order_by('-id').select_related(
            'scan'),
        to_attr='last_scan')

    last_report_prefetch = Prefetch(
        'urllistreport_set',
        # filter(pk=UrlListReport.objects.latest('id').pk).
        queryset=UrlListReport.objects.order_by('-id').only('id', 'at_when'),
        to_attr='last_report')

    urllist = UrlList.objects.all().filter(
        account=account, id=user_input['id'],
        is_deleted=False).annotate(num_urls=Count('urls')).prefetch_related(
            prefetch_last_scan, last_report_prefetch).first()

    if not urllist:
        return operation_response(error=True, message="No list of urls found.")

    # Yes, you can try and set any value. Values that are not recognized do not result in errors / error messages,
    # instead they will be overwritten with the default. This means less interaction with users / less annoyance over
    # errors on such simple forms.
    frequency = validate_list_automated_scan_frequency(
        user_input['automated_scan_frequency'])
    data = {
        'id': urllist.id,
        'account': account,
        'name': validate_list_name(user_input['name']),
        'enable_scans': bool(user_input['enable_scans']),
        'scan_type': validate_list_scan_type(user_input['scan_type']),
        'automated_scan_frequency': frequency,
        'scheduled_next_scan': determine_next_scan_moment(frequency),
    }

    # The pk is included in `data`, so this saves over the existing row.
    updated_urllist = UrlList(**data)
    updated_urllist.save()

    # make sure the account is serializable, inject other data.
    data['account'] = account.id
    data['num_urls'] = urllist.num_urls
    data['last_scan_id'] = None
    data['last_scan_state'] = None
    data['last_scan'] = None
    data['last_scan_finished'] = None
    data['last_report_id'] = None
    data['last_report_date'] = None

    if urllist.last_scan:
        data['last_scan_id'] = urllist.last_scan[0].scan.id
        data['last_scan_state'] = urllist.last_scan[0].state
        data['last_scan'] = urllist.last_scan[0].started_on.isoformat()
        data['last_scan_finished'] = urllist.last_scan[0].state in [
            "finished", "cancelled"
        ]

    if urllist.last_report:
        data['last_report_id'] = urllist.last_report[0].id
        data['last_report_date'] = urllist.last_report[0].at_when

    data['scan_now_available'] = updated_urllist.is_scan_now_available()

    # list warnings (might do: make more generic, only if another list warning ever could occur.)
    list_warnings = []
    if urllist.num_urls > config.DASHBOARD_MAXIMUM_DOMAINS_PER_LIST:
        list_warnings.append('WARNING_DOMAINS_IN_LIST_EXCEED_MAXIMUM_ALLOWED')
    # Bug fix: the collected warnings were discarded here (assigned `[]`),
    # so callers never saw WARNING_DOMAINS_IN_LIST_EXCEED_MAXIMUM_ALLOWED.
    data['list_warnings'] = list_warnings

    log.debug(data)

    # Sprinkling an activity stream action.
    action.send(account,
                verb='updated list',
                target=updated_urllist,
                public=False)

    return operation_response(success=True,
                              message="Updated list settings",
                              data=data)
Ejemplo n.º 16
0
 def setup_eager_loading(cls, queryset):
     """Eagerly load related users (ids only) and annotate their count."""
     users_only_ids = Prefetch('users', queryset=User.objects.only('id'))
     return queryset.prefetch_related(users_only_ids).annotate(
         users_amount=Count('users'))
Ejemplo n.º 17
0
def context_navbar(request):
    """
    Template context processor supplying navbar data: channels with themes,
    video types, disciplines, owners, footer links and global video stats.

    :param request: The current HttpRequest.
    :return: Dict of context variables for the navbar templates.
    """
    # Channels that are visible and have at least one published video,
    # with top-level themes (and their video counts) prefetched.
    channels = Channel.objects.filter(
        visible=True, video__is_draft=False).distinct().annotate(
            video_count=Count("video", distinct=True)).prefetch_related(
                Prefetch("themes",
                         queryset=Theme.objects.filter(
                             parentId=None).distinct().annotate(
                                 video_count=Count("video", distinct=True))))

    all_channels = Channel.objects.all().distinct().annotate(
        video_count=Count("video", distinct=True)).prefetch_related(
            Prefetch("themes",
                     queryset=Theme.objects.all().distinct().annotate(
                         video_count=Count("video", distinct=True))))

    types = Type.objects.filter(video__is_draft=False).distinct().annotate(
        video_count=Count("video", distinct=True))

    disciplines = Discipline.objects.filter(
        video__is_draft=False).distinct().annotate(
            video_count=Count("video", distinct=True))

    linkFooter = LinkFooter.objects.all()

    owners_filter_args = {
        'video__is_draft': False,
    }
    if MENUBAR_HIDE_INACTIVE_OWNERS:
        owners_filter_args['is_active'] = True
    if MENUBAR_SHOW_STAFF_OWNERS_ONLY:
        owners_filter_args['is_staff'] = True

    # Bug fix: the original appended to the module-level VALUES_LIST on every
    # request, growing the shared constant with duplicate entries. Build a
    # local copy instead.
    values_list = list(VALUES_LIST) + ['video_count', 'fl_name', 'fl_firstname']

    owners = Owner.objects.filter(
        **owners_filter_args).distinct().order_by(ORDER_BY).annotate(
            video_count=Count("video", distinct=True)).annotate(
                fl_name=Lower(Substr("last_name", 1, 1))).annotate(
                    fl_firstname=Lower(Substr("first_name", 1, 1))).order_by(
                        'fl_name').values(*values_list)

    # Under RGPD (GDPR), anonymous visitors must not see the owner list.
    if (USE_RGPD and not request.user.is_authenticated):
        listowner = {}
    else:
        listowner = get_list_owner(owners)

    # Only compute the latest-videos block on the home page.
    LAST_VIDEOS = get_last_videos() if request.path == "/" else None

    list_videos = Video.objects.filter(encoding_in_progress=False,
                                       is_draft=False)
    VIDEOS_COUNT = list_videos.count()
    # Run the aggregate once (the original evaluated it twice).
    duration_sum = list_videos.aggregate(Sum('duration'))['duration__sum']
    VIDEOS_DURATION = str(timedelta(seconds=duration_sum)) if duration_sum else 0

    return {
        'ALL_CHANNELS': all_channels,
        'CHANNELS': channels,
        'TYPES': types,
        'OWNERS': owners,
        'DISCIPLINES': disciplines,
        'LISTOWNER': json.dumps(listowner),
        'LAST_VIDEOS': LAST_VIDEOS,
        'LINK_FOOTER': linkFooter,
        'VIDEOS_COUNT': VIDEOS_COUNT,
        'VIDEOS_DURATION': VIDEOS_DURATION
    }
Ejemplo n.º 18
0
    def list(self, request, *args, **kwargs):
        """
        Return upcoming elections and their candidates for a location,
        identified either by a postcode or by coordinates.

        Query params: `postcode` or `coords` (one is required).
        """
        postcode = request.GET.get("postcode", None)
        coords = request.GET.get("coords", None)

        # TODO: postcode may not make sense everywhere
        if not postcode and not coords:
            return self._error("Postcode or Co-ordinates required")

        try:
            if coords:
                pees = get_post_elections_from_coords(coords)
            else:
                pees = get_post_elections_from_postcode(postcode)
        except Exception as e:
            # Bug fix: `e.message` does not exist on Python 3 exceptions and
            # raised AttributeError in the error path; use str(e) instead.
            return self._error(str(e))

        results = []
        pees = pees.select_related("post__organization", "election")
        for pee in pees:
            candidates = []
            # Prefetch everything the person serializer touches to avoid
            # N+1 queries per candidate.
            for membership in (pee.membership_set.filter(
                    post_election__election=pee.election,
                    role=pee.election.candidate_membership_role,
            ).prefetch_related(
                    Prefetch(
                        "person__memberships",
                        Membership.objects.select_related(
                            "party", "post", "post_election__election"),
                    ),
                    Prefetch(
                        "person__images",
                        PersonImage.objects.select_related("uploading_user"),
                    ),
                    "person__other_names",
                    "person__contact_details",
                    "person__links",
                    "person__identifiers",
                    "person__extra_field_values",
            ).select_related("person")):
                candidates.append(
                    serializers.NoVersionPersonSerializer(
                        instance=membership.person,
                        context={
                            "request": request
                        },
                        read_only=True,
                    ).data)
            election = {
                "election_date": text_type(pee.election.election_date),
                "election_name": pee.election.name,
                "election_id": pee.election.slug,
                "post": {
                    "post_name": pee.post.label,
                    "post_slug": pee.post.slug,
                    "post_candidates": None,
                },
                "organization": pee.post.organization.name,
                "candidates": candidates,
            }

            results.append(election)

        return Response(results)
Ejemplo n.º 19
0
def handle_upload_content(slug, code, part, f, user):
    """
    Update translations in the database from uploaded file.

    :arg str slug: Project slug.
    :arg str code: Locale code.
    :arg str part: Resource path or Subpage name.
    :arg UploadedFile f: UploadedFile instance.
    :arg User user: User uploading the file.
    """
    # Avoid circular import; someday we should refactor to avoid.
    from pontoon.sync import formats
    from pontoon.sync.changeset import ChangeSet
    from pontoon.sync.vcs.models import VCSProject
    from pontoon.base.models import (
        ChangedEntityLocale,
        Entity,
        Locale,
        Project,
        Resource,
        TranslatedResource,
        Translation,
    )

    relative_path = _get_relative_path_from_part(slug, part)
    project = get_object_or_404(Project, slug=slug)
    locale = get_object_or_404(Locale, code=code)
    resource = get_object_or_404(Resource, project__slug=slug, path=relative_path)

    # Store uploaded file to a temporary file and parse it
    extension = os.path.splitext(f.name)[1]
    with tempfile.NamedTemporaryFile(
        # Android strings parsers key off the 'strings' filename prefix.
        prefix='strings' if extension == '.xml' else '',
        suffix=extension,
    ) as temp:
        for chunk in f.chunks():
            temp.write(chunk)
        temp.flush()
        resource_file = formats.parse(temp.name)

    # Update database objects from file
    changeset = ChangeSet(
        project,
        VCSProject(project, locales=[locale]),
        timezone.now()
    )
    # Prefetch both the full translation set and the subset approved before
    # this sync, so the changeset can compare old and new states.
    entities_qs = Entity.objects.filter(
        resource__project=project,
        resource__path=relative_path,
        obsolete=False
    ).prefetch_related(
        Prefetch(
            'translation_set',
            queryset=Translation.objects.filter(locale=locale),
            to_attr='db_translations'
        )
    ).prefetch_related(
        Prefetch(
            'translation_set',
            queryset=Translation.objects.filter(locale=locale, approved_date__lte=timezone.now()),
            to_attr='db_translations_approved_before_sync'
        )
    )
    entities_dict = {entity.key: entity for entity in entities_qs}

    for vcs_translation in resource_file.translations:
        key = vcs_translation.key
        if key in entities_dict:
            entity = entities_dict[key]
            changeset.update_entity_translations_from_vcs(
                entity, locale.code, vcs_translation, user,
                entity.db_translations, entity.db_translations_approved_before_sync
            )

    changeset.bulk_create_translations()
    changeset.bulk_update_translations()

    if changeset.changed_translations:
        # Update 'active' status of all changed translations and their siblings,
        # i.e. translations of the same entity to the same locale.
        changed_pks = {t.pk for t in changeset.changed_translations}
        (
            Entity.objects
            .filter(translation__pk__in=changed_pks)
            .reset_active_translations(locale=locale)
        )

        # Run checks and create TM entries for translations that pass them
        valid_translations = changeset.bulk_check_translations()
        changeset.bulk_create_translation_memory_entries(valid_translations)

    TranslatedResource.objects.get(resource=resource, locale=locale).calculate_stats()

    # Mark translations as changed
    changed_entities = {}
    # Materialize once as a set: the original did an O(n) membership test
    # against a ValuesListQuerySet for every changed translation.
    existing = set(
        ChangedEntityLocale.objects.values_list('entity', 'locale').distinct()
    )
    for t in changeset.changed_translations:
        key = (t.entity.pk, t.locale.pk)
        # Remove duplicate changes to prevent unique constraint violation
        if key not in existing:
            changed_entities[key] = ChangedEntityLocale(entity=t.entity, locale=t.locale)

    ChangedEntityLocale.objects.bulk_create(changed_entities.values())

    # Update latest translation
    if changeset.translations_to_create:
        changeset.translations_to_create[-1].update_latest_translation()
Ejemplo n.º 20
0
    def __init__(self, *args, **kwargs):
        """
        Build the add-on selection form for one add-on category.

        Takes additional keyword arguments:

        :param iao: The ItemAddOn object
        :param event: The event this belongs to
        :param subevent: The event the parent cart position belongs to
        :param initial: The current set of add-ons
        :param quota_cache: A shared dictionary for quota caching
        :param item_cache: A shared dictionary for item/category caching
        :param price_included: Whether the add-on price is included in the base price
        :param sales_channel: The sales channel items must be available on
        :param base_position: The parent cart position this add-on belongs to
        """
        self.iao = kwargs.pop('iao')
        category = self.iao.addon_category
        self.event = kwargs.pop('event')
        subevent = kwargs.pop('subevent')
        current_addons = kwargs.pop('initial')
        quota_cache = kwargs.pop('quota_cache')
        item_cache = kwargs.pop('item_cache')
        self.price_included = kwargs.pop('price_included')
        self.sales_channel = kwargs.pop('sales_channel')
        self.base_position = kwargs.pop('base_position')

        super().__init__(*args, **kwargs)

        if subevent:
            item_price_override = subevent.item_price_overrides
            var_price_override = subevent.var_price_overrides
        else:
            item_price_override = {}
            var_price_override = {}

        # Item querysets are shared across forms via item_cache, keyed by
        # (subevent, category) so each combination is only fetched once.
        ckey = '{}-{}'.format(subevent.pk if subevent else 0, category.pk)
        if ckey not in item_cache:
            # Get all items to possibly show
            items = category.items.filter_available(
                channel=self.sales_channel,
                allow_addons=True).select_related('tax_rule').prefetch_related(
                    Prefetch(
                        'quotas',
                        to_attr='_subevent_quotas',
                        queryset=self.event.quotas.filter(subevent=subevent)),
                    Prefetch('variations',
                             to_attr='available_variations',
                             queryset=ItemVariation.objects.filter(
                                 active=True,
                                 quotas__isnull=False).prefetch_related(
                                     Prefetch(
                                         'quotas',
                                         to_attr='_subevent_quotas',
                                         queryset=self.event.quotas.filter(
                                             subevent=subevent))).distinct()),
                    'event').annotate(
                        quotac=Count('quotas'),
                        has_variations=Count('variations')).filter(
                            quotac__gt=0).order_by('category__position',
                                                   'category_id', 'position',
                                                   'name')
            item_cache[ckey] = items
        else:
            items = item_cache[ckey]

        self.vars_cache = {}

        for i in items:
            # Skip items configured to be hidden while another item is
            # still available.
            if i.hidden_if_available:
                q = i.hidden_if_available.availability(_cache=quota_cache)
                if q[0] == Quota.AVAILABILITY_OK:
                    continue

            if i.has_variations:
                # One radio-select field per item with variations; the empty
                # first choice means "no selection".
                choices = [('', _('no selection'), '')]
                for v in i.available_variations:
                    cached_availability = v.check_quotas(subevent=subevent,
                                                         _cache=quota_cache)
                    if self.event.settings.hide_sold_out and cached_availability[
                            0] < Quota.AVAILABILITY_RESERVED:
                        continue

                    if v._subevent_quotas:
                        self.vars_cache[v.pk] = v
                        choices.append(
                            (v.pk,
                             self._label(
                                 self.event,
                                 v,
                                 cached_availability,
                                 override_price=var_price_override.get(v.pk),
                                 initial=current_addons.get(i.pk) == v.pk),
                             v.description))

                n = i.name
                if i.picture:
                    # Render the product picture as a lightbox link inside
                    # the field label; escape names to keep the HTML safe.
                    n = escape(n)
                    n += '<br>'
                    n += '<a href="{}" class="productpicture" data-title="{}" data-lightbox="{}">'.format(
                        i.picture.url, escape(escape(i.name)), i.id)
                    n += '<img src="{}" alt="{}">'.format(
                        thumb(i.picture, '60x60^'), escape(i.name))
                    n += '</a>'
                    n = mark_safe(n)
                field = AddOnVariationField(
                    choices=choices,
                    label=n,
                    required=False,
                    widget=AddOnRadioSelect,
                    help_text=rich_text(str(i.description)),
                    initial=current_addons.get(i.pk),
                )
                field.item = i
                # Only show the field if at least one real variation choice
                # exists besides "no selection".
                if len(choices) > 1:
                    self.fields['item_%s' % i.pk] = field
            else:
                # Simple items get a single checkbox field.
                if not i._subevent_quotas:
                    continue
                cached_availability = i.check_quotas(subevent=subevent,
                                                     _cache=quota_cache)
                if self.event.settings.hide_sold_out and cached_availability[
                        0] < Quota.AVAILABILITY_RESERVED:
                    continue
                field = forms.BooleanField(
                    label=self._label(self.event,
                                      i,
                                      cached_availability,
                                      override_price=item_price_override.get(
                                          i.pk),
                                      initial=i.pk in current_addons),
                    required=False,
                    initial=i.pk in current_addons,
                    help_text=rich_text(str(i.description)),
                )
                field.item = i
                self.fields['item_%s' % i.pk] = field
Ejemplo n.º 21
0
def listar_modulos_com_aulas():
    """Return all modules ordered by `order`, each with its lessons
    (also ordered by `order`) prefetched into the `aulas` attribute."""
    lessons_in_order = Prefetch('aula_set',
                                queryset=Aula.objects.order_by('order'),
                                to_attr='aulas')
    return Modulo.objects.order_by('order').prefetch_related(
        lessons_in_order).all()
def detect_and_cache_field_values_for_document(log: ProcessLogger,
                                               document: Document,
                                               save: bool = True,
                                               clear_old_values: bool = True,
                                               changed_by_user: User = None,
                                               system_fields_changed: bool = False,
                                               generic_fields_changed: bool = False,
                                               document_initial_load: bool = False,
                                               ignore_field_codes: Set[str] = None,
                                               updated_field_codes: List[str] = None,
                                               skip_modified_values: bool = True,
                                               field_codes_to_detect: Optional[List[str]] = None):
    """
    Detects field values for a document and stores their DocumentFieldValue objects as well as Document.field_value.
    These two should always be consistent.
    :param log: process logger used for progress and error reporting
    :param document: document whose field values are detected
    :param save: persist detected values and fire change signals; otherwise run detection without writing
    :param clear_old_values: delete existing field values before detecting new ones
    :param changed_by_user: user to attribute the change to when firing document-changed signals
    :param system_fields_changed: forwarded to fire_document_changed
    :param generic_fields_changed: forwarded to fire_document_changed
    :param ignore_field_codes: field codes excluded from detection
    :param document_initial_load: forwarded to fire_document_changed
    :param updated_field_codes: if set, we search for changed and dependent fields only
    :param skip_modified_values: don't overwrite field values overwritten by user
    :param field_codes_to_detect: optional list of fields codes - only these fields are to be detected
    :return: list of detection results (NOTE(review): nothing is ever appended to it in this version)
    """
    import apps.document.repository.document_field_repository as dfr
    field_repo = dfr.DocumentFieldRepository()

    # Completed (inactive-status) documents are read-only for detection.
    if save and document.status and not document.status.is_active:
        raise RuntimeError(f'Detecting field values for completed documents is not permitted.\n'
                           f'Document: {document.name} (#{document.pk})')

    document_type = document.document_type  # type: DocumentType

    all_fields = document_type.fields \
        .all() \
        .prefetch_related(Prefetch('depends_on_fields', queryset=DocumentField.objects.only('uid').all()))

    all_fields = list(all_fields)

    # (field code, set of codes it depends on) pairs drive the dependency-aware ordering below.
    fields_and_deps = [(f.code, set(f.get_depends_on_codes()) or set()) for f in all_fields]
    dependent_fields = get_dependent_fields(fields_and_deps, set(updated_field_codes)) \
        if updated_field_codes else None

    sorted_codes = order_field_detection(fields_and_deps)
    all_fields_code_to_field = {f.code: f for f in all_fields}  # type: Dict[str, DocumentField]

    log.info(f'Detecting field values for document {document.name} (#{document.pk}), save={save}.\n'
             f'Updated fields: {updated_field_codes or "All"}.\n'
             f'Dependent fields to be detected: {dependent_fields or "All"}.\n'
             f'Ignored fields: {ignore_field_codes}.')

    if updated_field_codes:
        sorted_codes = [c for c in sorted_codes
                        if c in dependent_fields and (not ignore_field_codes or c not in ignore_field_codes)]
    elif ignore_field_codes:
        sorted_codes = [c for c in sorted_codes if c not in ignore_field_codes]

    current_field_values = {f.code: None for f in all_fields}
    # we may get values for fields required for sorted_codes, regarding
    # further dependencies
    # or we may just get all fields' values (field_codes_only=None)
    actual_field_values = field_repo.get_field_code_to_python_value(document_type_id=document_type.pk,
                                                                    doc_id=document.pk,
                                                                    field_codes_only=None)
    current_field_values.update(actual_field_values)

    res = list()

    detecting_field_status = []  # type:List[str]
    detection_errors = []  # type:List[Tuple[str, str, Exception, Any]]

    # do not touch field values modified by user
    skip_codes = set()
    if skip_modified_values:
        skip_codes = set(list(FieldValue.objects.filter(
            modified_by__isnull=False, document_id=document.pk).values_list('field__code', flat=True)))
        # Fields whose annotations were already accepted/rejected by a human are also left alone.
        processed_ant_fields = set(list(FieldAnnotation.objects.filter(status_id__in=(
            FieldAnnotationStatus.rejected_status_pk(), FieldAnnotationStatus.accepted_status_pk()),
            document_id=document.pk).values_list('field__code', flat=True)))
        skip_codes.update(processed_ant_fields)
        if updated_field_codes:  # these fields have to be deleted despite being set by user
            skip_codes -= set(updated_field_codes)

    if field_codes_to_detect:
        # Restrict detection to the requested codes by skipping everything else.
        all_codes = set([f.code for f in all_fields])
        skip_codes = skip_codes.union(all_codes - set(field_codes_to_detect))

    if clear_old_values:
        field_repo.delete_document_field_values(document.pk,
                                                list(skip_codes),
                                                updated_field_codes)

    for field_code in sorted_codes:
        if field_code in skip_codes:
            continue
        field = all_fields_code_to_field[field_code]  # type: DocumentField
        typed_field = TypedField.by(field)  # type: TypedField
        field_detection_strategy = FIELD_DETECTION_STRATEGY_REGISTRY[
            field.value_detection_strategy]  # type: FieldDetectionStrategy

        try:
            new_field_value_dto = field_detection_strategy.detect_field_value(
                log=log, doc=document, field=field,
                field_code_to_value=current_field_values)

            if not new_field_value_dto:
                detecting_field_status.append(f"No new values gotten for '{field.code}'")
                continue
            # if is_unit_limit_exceeded(new_field_value_dto, field, document):
            #     continue

            detecting_field_status.append(
                f"{format_value_short_str(new_field_value_dto.field_value)} for '{field.code}'")

            # now merge the detection results with the current DB state
            if save:
                # user = None here to store detected values as owned by system allowing further overwriting
                field_value, annotations = field_repo.update_field_value_with_dto(document=document,
                                                                                  field=field,
                                                                                  field_value_dto=new_field_value_dto,
                                                                                  user=None)

                # and update the field value of this field which may be used for detection of fields depending on it
                current_field_values[field.code] = typed_field.field_value_json_to_python(field_value.value)

            # If save is not requested then do not update current_field_values.
            # Most likely in this case we detect only few requested fields and trying to comply the dependency
            # tree makes no big sense.
        except Exception as e:
            # Additionally logging here because the further compound exception will not contain the full stack trace.
            # Fix: "extra" must be a mapping of log field name -> value; the original passed a set literal.
            log.error(f'Exception caught while detecting value of field {field.code} ({typed_field.type_code})',
                      exc_info=e,
                      extra={Document.LOG_FIELD_DOC_ID: str(document.pk)})
            detection_errors.append((field.code, typed_field.type_code, e, sys.exc_info()))

    if save:
        if updated_field_codes:
            user_fields_changed_set = set(updated_field_codes)
            if dependent_fields:
                user_fields_changed_set.update(dependent_fields)
            user_fields_changed = list(user_fields_changed_set)  # type: FieldSpec
        else:
            user_fields_changed = True

        fire_document_changed(sender=detect_and_cache_field_values_for_document,
                              log=log,
                              document=document,
                              changed_by_user=changed_by_user,
                              document_initial_load=document_initial_load,
                              system_fields_changed=system_fields_changed,
                              generic_fields_changed=generic_fields_changed,
                              user_fields_changed=user_fields_changed)
        if dependent_fields:
            msg = f'Recalculating dependent fields for {document.name}: '  # dependent_fields
            msg += ', '.join(dependent_fields)
            msg += '.\n\nSource fields data: \n'
            msg += '; '.join([f'"{k}": "{format_value_short_str(current_field_values[k])}"'
                              for k in current_field_values])
            msg += '.\n\nCalculation results:\n'
            msg += '\n'.join(detecting_field_status)
            # Fix: same set-vs-dict correction for the "extra" argument as above.
            log.info(msg, extra={Document.LOG_FIELD_DOC_ID: str(document.pk)})

    if detection_errors:
        fields_str = ', '.join([f'{e[0]} ({e[1]})' for e in detection_errors])
        msg = f'There were errors while detecting fields:\n{fields_str}\n' + \
              f'for document {document.name} (#{document.pk}, type {document_type.code})\n'
        for f_code, f_type, ex, ex_stack in detection_errors:
            msg += f'\n{f_code}, {f_type}: {ex}'
        raise FieldDetectionError(msg)

    return res
Ejemplo n.º 23
0
 def get_queryset(self):
     """Return the base queryset with the ``servers`` relation prefetched."""
     base_qs = super().get_queryset()
     return base_qs.prefetch_related(Prefetch('servers'))
Ejemplo n.º 24
0
 def get_queryset(self):
     """Sections visible to the requesting user: those they mentor, attend as a
     student, or coordinate - excluding courses they were banned from. Each
     section's spacetimes are prefetched ordered by (day_of_week, start_time)."""
     banned_course_ids = self.request.user.student_set.filter(
         banned=True).values_list('section__course__id', flat=True)
     ordered_spacetimes = Prefetch(
         "spacetimes",
         queryset=Spacetime.objects.order_by("day_of_week", "start_time"))
     involved = (Q(mentor__user=self.request.user)
                 | Q(students__user=self.request.user)
                 | Q(course__coordinator__user=self.request.user))
     return (Section.objects
             .exclude(course__pk__in=banned_course_ids)
             .prefetch_related(ordered_spacetimes)
             .filter(involved)
             .distinct())
Ejemplo n.º 25
0
def show_buildings(request):
    """Render the buildings page, prefetching each building's meters ordered
    by fuel type."""
    meters_by_fuel = Prefetch(
        "meter_set",
        Meter.objects.all().order_by("fuel"),
    )
    buildings = Building.objects.prefetch_related(meters_by_fuel)
    return render(request, "buildings.html", {"buildings": buildings})
Ejemplo n.º 26
0
    def students(self, request, pk=None):
        """GET: list the section's active students (with user and ordered
        attendance records prefetched).

        PUT: enroll a student into the section. Coordinators enroll the user
        whose ``email`` is supplied in the request body; other users enroll
        themselves. Re-activates an inactive enrollment in the same course if
        one exists, otherwise creates a new Student (and, for coordinators,
        the User account if needed).

        Raises PermissionDenied when the user is already involved in the
        course or the section is full.
        """
        if request.method == 'GET':
            section = get_object_or_error(self.get_queryset(), pk=pk)
            return Response(StudentSerializer(section.students.select_related("user").prefetch_related(Prefetch("attendance_set", queryset=Attendance.objects.order_by("date"))).filter(active=True), many=True).data)
        # PUT
        with transaction.atomic():
            """
            We reload the section object for atomicity. Even though we do not update
            the section directly, any student trying to enroll must first acquire a lock on the
            desired section. This allows us to assume that current_student_count is correct.
            """
            section = get_object_or_error(Section.objects, pk=pk)
            section = Section.objects.select_for_update().get(pk=section.pk)
            is_coordinator = bool(section.course.coordinator_set.filter(user=request.user).count())
            if is_coordinator and not request.data.get('email'):
                return Response({'error': 'Must specify email of student to enroll'}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
            if not (request.user.can_enroll_in_course(section.course) or is_coordinator):
                # logger.warn is a deprecated alias of logger.warning; use the canonical name.
                logger.warning(
                    f"<Enrollment:Failure> User {log_str(request.user)} was unable to enroll in Section {log_str(section)} because they are already involved in this course")
                raise PermissionDenied(
                    "You are already either mentoring for this course or enrolled in a section", status.HTTP_422_UNPROCESSABLE_ENTITY)

            if section.current_student_count >= section.capacity:
                logger.warning(
                    f"<Enrollment:Failure> User {log_str(request.user)} was unable to enroll in Section {log_str(section)} because it was full")
                raise PermissionDenied("There is no space available in this section", status.HTTP_423_LOCKED)
            try:  # Student dropped a section in this course and is now enrolling in a different one
                if is_coordinator:
                    student = Student.objects.get(active=False, section__course=section.course,
                                                  user__email=request.data['email'])
                else:
                    student = request.user.student_set.get(active=False, section__course=section.course)
                old_section = student.section
                student.section = section
                student.active = True
                student.save()
                logger.info(
                    f"<Enrollment:Success> User {log_str(student.user)} swapped into Section {log_str(section)} from Section {log_str(old_section)}")
                return Response(status=status.HTTP_204_NO_CONTENT)
            except Student.DoesNotExist:  # Student is enrolling in this course for the first time
                if is_coordinator:
                    user, _ = User.objects.get_or_create(
                        email=request.data['email'], username=request.data['email'].split('@')[0])
                    student = Student.objects.create(user=user, section=section)
                else:
                    student = Student.objects.create(user=request.user, section=section)
                logger.info(
                    f"<Enrollment:Success> User {log_str(student.user)} enrolled in Section {log_str(section)}")
                return Response({'id': student.id}, status=status.HTTP_201_CREATED)
Ejemplo n.º 27
0
def problem_submit(request, problem=None, submission=None):
    """Render and process the problem submission form.

    :param request: HTTP request; ``request.user`` must have a ``profile``.
    :param problem: optional problem code used to pre-select the problem.
    :param submission: optional id of an earlier submission to resubmit.
    """
    # Only the submission's owner (or a user with judge.resubmit_other) may resubmit it.
    if submission is not None and not request.user.has_perm('judge.resubmit_other') and \
                    get_object_or_404(Submission, id=int(submission)).user.user != request.user:
        raise PermissionDenied()

    profile = request.user.profile
    if request.method == 'POST':
        form = ProblemSubmitForm(request.POST,
                                 instance=Submission(user=profile))
        if form.is_valid():
            # Rate limit: more than 2 in-flight (not finished, not rejudged) submissions
            # is treated as spam unless the user has judge.spam_submission.
            if (not request.user.has_perm('judge.spam_submission')
                    and Submission.objects.filter(
                        user=profile, was_rejudged=False).exclude(
                            status__in=['D', 'IE', 'CE', 'AB']).count() > 2):
                return HttpResponse(
                    '<h1>You submitted too many submissions.</h1>', status=503)
            # The chosen language must be allowed for the chosen problem.
            if not form.cleaned_data['problem'].allowed_languages.filter(
                    id=form.cleaned_data['language'].id).exists():
                raise PermissionDenied()
            if not form.cleaned_data['problem'].is_accessible_by(request.user):
                user_logger.info(
                    'Naughty user %s wants to submit to %s without permission',
                    request.user.username, form.cleaned_data['problem'].code)
                return HttpResponseForbidden(
                    '<h1>Do you want me to ban you?</h1>')
            # Per-problem bans apply to everyone except superusers.
            if not request.user.is_superuser and form.cleaned_data[
                    'problem'].banned_users.filter(id=profile.id).exists():
                return generic_message(
                    request, _('Banned from submitting'),
                    _('You have been declared persona non grata for this problem. '
                      'You are permanently barred from submitting this problem.'
                      ))

            if profile.current_contest is not None:
                try:
                    contest_problem = form.cleaned_data[
                        'problem'].contests.get(
                            contest=profile.current_contest.contest)
                except ContestProblem.DoesNotExist:
                    # Problem is not part of the active contest: plain submission.
                    model = form.save()
                else:
                    # Enforce the contest's per-problem submission limit, then
                    # record the submission against the contest participation.
                    max_subs = contest_problem.max_submissions
                    if max_subs and get_contest_submission_count(
                            problem, profile) >= max_subs:
                        return generic_message(
                            request, _('Too many submissions'),
                            _('You have exceeded the submission limit for this problem.'
                              ))
                    model = form.save()
                    contest = ContestSubmission(
                        submission=model,
                        problem=contest_problem,
                        participation=profile.current_contest)
                    contest.save()
            else:
                model = form.save()

            profile.update_contest()
            model.judge(rejudge=False)
            return HttpResponseRedirect(
                reverse('submission_status', args=[str(model.id)]))
        else:
            # Invalid form: keep cleaned_data so the language/problem widgets below
            # can still be configured.
            form_data = form.cleaned_data
            if submission is not None:
                sub = get_object_or_404(Submission, id=int(submission))
    else:
        # GET: build the initial form, optionally pre-filled from a problem code
        # and/or a previous submission's source and language.
        initial = {'language': profile.language}
        if problem is not None:
            initial['problem'] = get_object_or_404(Problem, code=problem)
            problem_object = initial['problem']
            if not problem_object.is_accessible_by(request.user):
                raise Http404()
        if submission is not None:
            try:
                sub = get_object_or_404(Submission, id=int(submission))
                initial['source'] = sub.source
                initial['language'] = sub.language
            except ValueError:
                # Non-numeric submission id.
                raise Http404()
        form = ProblemSubmitForm(initial=initial)
        form_data = initial
    if 'problem' in form_data:
        form.fields['language'].queryset = (
            form_data['problem'].usable_languages.order_by(
                'name', 'key').prefetch_related(
                    Prefetch('runtimeversion_set',
                             RuntimeVersion.objects.order_by('priority'))))
        problem_object = form_data['problem']
    if 'language' in form_data:
        form.fields['source'].widget.mode = form_data['language'].ace
    form.fields['source'].widget.theme = profile.ace_theme

    # NOTE(review): if 'problem' never made it into form_data, problem_object is
    # unbound here and the render below would raise UnboundLocalError - presumably
    # unreachable via the URL patterns, but worth confirming.
    if submission is not None:
        default_lang = sub.language
    else:
        default_lang = request.user.profile.language

    submission_limit = submissions_left = None
    if profile.current_contest is not None:
        try:
            submission_limit = problem_object.contests.get(
                contest=profile.current_contest.contest).max_submissions
        except ContestProblem.DoesNotExist:
            pass
        else:
            if submission_limit:
                submissions_left = submission_limit - get_contest_submission_count(
                    problem, profile)
    return render(
        request, 'problem/submit.html', {
            'form':
            form,
            'title':
            _('Submit to %(problem)s') % {
                'problem': problem_object.translated_name(
                    request.LANGUAGE_CODE),
            },
            'content_title':
            mark_safe(
                escape(_('Submit to %(problem)s')) % {
                    'problem':
                    format_html(
                        u'<a href="{0}">{1}</a>',
                        reverse('problem_detail', args=[problem_object.code]),
                        problem_object.translated_name(request.LANGUAGE_CODE))
                }),
            'langs':
            Language.objects.all(),
            'no_judges':
            not form.fields['language'].queryset,
            'submission_limit':
            submission_limit,
            'submissions_left':
            submissions_left,
            'ACE_URL':
            ACE_URL,
            'default_lang':
            default_lang,
        })
Ejemplo n.º 28
0
    def get_queryset(self, *args, **kwargs):
        """Return the viewed user's ongoing project memberships, annotated with
        PI/manager flags, ordered PI-first then manager-first, with each
        project's other managers prefetched into ``project_managers``."""
        viewed_user = self.viewed_user

        # "Ongoing" membership and project statuses - anything else is filtered out.
        ongoing_projectuser_statuses = (
            'Active',
            'Pending - Add',
            'Pending - Remove',
        )
        ongoing_project_statuses = (
            'New',
            'Active',
        )

        qs = ProjectUser.objects.filter(
            user=viewed_user,
            status__name__in=ongoing_projectuser_statuses,
            project__status__name__in=ongoing_project_statuses,
        ).select_related(
            'status',
            'role',
            'project',
            'project__status',
            'project__field_of_science',
            'project__pi',
        ).only(
            # Restrict loaded columns to exactly what the template displays.
            'status__name',
            'role__name',
            'project__title',
            'project__status__name',
            'project__field_of_science__description',
            'project__pi__username',
            'project__pi__first_name',
            'project__pi__last_name',
            'project__pi__email',
        ).annotate(
            # True when the viewed user is the project's PI.
            is_project_pi=ExpressionWrapper(
                Q(user=F('project__pi')),
                output_field=BooleanField(),
            ),
            # True when the membership role is 'Manager'.
            is_project_manager=ExpressionWrapper(
                Q(role__name='Manager'),
                output_field=BooleanField(),
            ),
        ).order_by(
            '-is_project_pi',
            '-is_project_manager',
            Lower('project__pi__username').asc(),
            Lower('project__title').asc(),
            # unlikely things will get to this point unless there's almost-duplicate projects
            '-project__pk',  # more performant stand-in for '-project__created'
        ).prefetch_related(
            # Attach each project's *other* ongoing managers as 'project_managers'.
            Prefetch(
                lookup='project__projectuser_set',
                queryset=ProjectUser.objects.filter(
                    role__name='Manager',
                    status__name__in=ongoing_projectuser_statuses,
                ).exclude(
                    user__pk__in=[
                        F(
                            'project__pi__pk'
                        ),  # we assume pi is 'Manager' or can act like one - no need to list twice
                        viewed_user.
                        pk,  # we display elsewhere if the user is a manager of this project
                    ], ).select_related(
                        'status',
                        'user',
                    ).only(
                        'status__name',
                        'user__username',
                        'user__first_name',
                        'user__last_name',
                        'user__email',
                    ).order_by('user__username', ),
                to_attr='project_managers',
            ), )

        return qs
Ejemplo n.º 29
0
 def select_for_session_list(self):
     """Queryset tuned for the session list view: joins exam and its category,
     and prefetches subjects into a ``subject_list`` attribute."""
     qs = self.select_related('exam', 'exam__category')
     return qs.prefetch_related(
         Prefetch('subjects', to_attr='subject_list'))
Ejemplo n.º 30
0
def backfill_flowrun_path(ActionSet, FlowRun, FlowStep):
    """Backfill FlowRun.path (a JSON list of visited steps) from FlowStep rows.

    Resumable: the highest migrated run id is kept in the cache under
    'path_mig_highpoint' so a re-run continues where it left off.

    :param ActionSet: historical ActionSet model class
    :param FlowRun: historical FlowRun model class
    :param FlowStep: historical FlowStep model class
    """
    # start by ensuring all flows are at a minimum version
    if not migrate_flows('10.4'):
        raise ValueError("Migration can't proceed because some flows couldn't be migrated")

    # get all flow action sets
    action_sets = list(ActionSet.objects.filter(flow__is_active=True))
    if not action_sets:
        return

    print("Found %d flow action sets..." % len(action_sets))

    # make map of action set node UUIDs to their exit UUIDs
    action_set_uuid_to_exit = {a.uuid: a.exit_uuid for a in action_sets if a.exit_uuid}

    # a size mismatch means some action sets had no exit_uuid and were dropped above
    if len(action_sets) != len(action_set_uuid_to_exit):
        raise ValueError(
            "Found actionsets without exit_uuids, use migrate_flows command to migrate these flows forward"
        )

    # has this migration been run before but didn't complete?
    highpoint = cache.get('path_mig_highpoint')

    # get all flow run ids we're going to migrate
    run_ids = FlowRun.objects.filter(flow__is_active=True).values_list('id', flat=True).order_by('id')

    if highpoint:
        print("Resuming from previous highpoint at run #%d" % int(highpoint))
        run_ids = run_ids.filter(id__gt=int(highpoint))

    print("Fetching runs that need to be migrated (hold tight)...")

    # materialize ids into a compact C array of signed longs ('l') to cap memory use
    run_ids = array(str('l'), run_ids)

    print("Found %d runs that need to be migrated" % len(run_ids))

    num_updated = 0
    num_trimmed = 0
    start = time.time()

    # we want to prefetch steps with each flow run, in chronological order
    steps_prefetch = Prefetch('steps', queryset=FlowStep.objects.only('step_uuid', 'step_type', 'rule_uuid', 'arrived_on').order_by('arrived_on'))

    for id_batch in chunk_list(run_ids, 1000):
        batch = FlowRun.objects.filter(id__in=id_batch).order_by('id').prefetch_related(steps_prefetch)

        for run in batch:
            path = []
            for step in run.steps.all():
                step_dict = {PATH_NODE_UUID: step.step_uuid, PATH_ARRIVED_ON: step.arrived_on.isoformat()}
                # ruleset steps ('R') carry their own exit; action set steps look it up
                if step.step_type == 'R':
                    step_dict[PATH_EXIT_UUID] = step.rule_uuid
                else:
                    exit_uuid = action_set_uuid_to_exit.get(step.step_uuid)
                    if exit_uuid:
                        step_dict[PATH_EXIT_UUID] = exit_uuid

                path.append(step_dict)

            # trim path if necessary
            if len(path) > PATH_MAX_STEPS:
                # keep only the most recent PATH_MAX_STEPS entries
                path = path[len(path) - PATH_MAX_STEPS:]
                num_trimmed += 1

            run.path = json.dumps(path)
            run.save(update_fields=('path',))

            # record progress so an interrupted run can resume (expires in 7 days)
            cache.set('path_mig_highpoint', str(run.id), 60 * 60 * 24 * 7)

        num_updated += len(batch)
        updated_per_sec = num_updated / (time.time() - start)

        # figure out estimated time remaining
        time_remaining = ((len(run_ids) - num_updated) / updated_per_sec)
        finishes = timezone.now() + timedelta(seconds=time_remaining)

        print(" > Updated %d runs of %d (%2.2f per sec) Est finish: %s" % (num_updated, len(run_ids), updated_per_sec, finishes))

    print("Run path migration completed in %d mins. %d paths were trimmed" % ((int(time.time() - start) / 60), num_trimmed))