Example no. 1
def getcartproducts(request):
    products = []
    r_token = request.META['HTTP_AUTHORIZATION']
    new_token = r_token.split(' ', 1)[1]
    token = Token.objects.filter(key=new_token).first()
    buyer = Buyer.objects.filter(user_ptr_id=token.user.id).first()
    cart_items = Order_Product.objects.filter(buyer=buyer, checkout__isnull=True)
    for order in cart_items:
        if order.quantity is not None:
            print(order.quantity)
            product = (
                Product.objects.filter(id=order.product.id)
                .annotate(
                    order_id=Sum(order.id, output_field=IntegerField()),
                    quantity=Sum(order.quantity, output_field=FloatField()),
                    total=Sum(order.total, output_field=FloatField()),
                )
                .first()
            )
            print(product.quantity)
            products.append(product)
    data = CustomCartSerializer(products, many=True)    
    context = {
        'data': data.data
    }

    return Response(data.data,status=status.HTTP_200_OK)
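Note on the pattern above: the Sum() calls aggregate Python constants, which only yields the intended value because filter(id=...) matches a single row. A minimal hedged sketch of the more direct spelling with Value(), assuming the same models and serializer as the example:

from django.db.models import FloatField, IntegerField, Value

# Sketch only: attach the order's values as typed constants instead of
# summing literals over a one-row group.
product = (
    Product.objects.filter(id=order.product.id)
    .annotate(
        order_id=Value(order.id, output_field=IntegerField()),
        quantity=Value(order.quantity, output_field=FloatField()),
        total=Value(order.total, output_field=FloatField()),
    )
    .first()
)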
Example no. 2
class Month(Func):
    function = 'EXTRACT'
    template = '%(function)s(MONTH from %(expressions)s)'
    output_field = IntegerField()
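A short usage sketch for the Month expression above; the Invoice model and its created_at field are illustrative assumptions:

from django.db.models import Count

# Group invoices by the month extracted via EXTRACT(MONTH from created_at).
invoices_per_month = (
    Invoice.objects
    .annotate(month=Month('created_at'))
    .values('month')
    .annotate(total=Count('id'))
    .order_by('month')
)

Recent Django versions also ship django.db.models.functions.ExtractMonth, which covers the same need without a custom Func.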
Example no. 3
    def get_context_data(self, **kwargs):
        context = super(AvailableTasksView, self).get_context_data(**kwargs)
        context['form'] = self.form
        context['tab_count'] = context['paginator'].count

        if self.priority:
            workflows = Workflow.objects\
                .unclosed()\
                .filter(priority=True)\
                .select_related('country', 'locality')\
                .annotate(
                    n_tasks_open=Subquery(
                        Task.objects.filter(workflows=OuterRef('pk'), state=Task.OPEN, assigned_to=None)
                        .values('workflows__pk')
                        .annotate(cnt=Count(1))
                        .values('cnt'),
                        output_field=IntegerField()),
                    n_tasks_assigned=Subquery(
                        Task.objects.filter(workflows=OuterRef('pk'), state=Task.OPEN)
                        .exclude(assigned_to=None)
                        .values('workflows__pk')
                        .annotate(cnt=Count(1))
                        .values('cnt'),
                        output_field=IntegerField()),
                    n_tasks_pending_review=Subquery(
                        Task.objects.filter(workflows=OuterRef('pk'), state=Task.PENDING_REVIEW)
                        .values('workflows__pk')
                        .annotate(cnt=Count(1))
                        .values('cnt'),
                        output_field=IntegerField()),
                    n_tasks_done=Subquery(
                        Task.objects.filter(workflows=OuterRef('pk'), state=Task.DONE)
                        .values('workflows__pk')
                        .annotate(cnt=Count(1))
                        .values('cnt'),
                        output_field=IntegerField()),
                    n_tasks_cancelled=Subquery(
                        Task.objects.filter(workflows=OuterRef('pk'), state=Task.CANCELLED)
                        .values('workflows__pk')
                        .annotate(cnt=Count(1))
                        .values('cnt'),
                        output_field=IntegerField()),
                    ).all()

            # sort by due date (desc + nulls last), then by id (asc)
            def key(x):
                return [
                    -x.due_date.toordinal() if x.due_date else math.inf, x.pk
                ]

            context['priority_workflows'] = sorted(workflows, key=key)

            for w in context['priority_workflows']:
                w.task_counts = [(state, getattr(w, f'n_tasks_{state}')
                                  or 0, state.replace('_', ' ')) for state in [
                                      'open', 'assigned', 'pending_review',
                                      'done', 'cancelled'
                                  ]]
                w.n_tasks = sum(n for s, n, l in w.task_counts)
                w.n_tasks_complete = (w.n_tasks_done
                                      or 0) + (w.n_tasks_cancelled or 0)
                w.pct_complete = (w.n_tasks_complete or 0) / (w.n_tasks
                                                              or 1) * 100.0

        return context
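The five annotations above differ only in their task filters. A hedged refactoring sketch (same Task/Workflow models assumed) that builds the correlated per-workflow count from a small helper; the "assigned" variant would additionally need an .exclude(assigned_to=None):

from django.db.models import Count, IntegerField, OuterRef, Subquery

def task_count_subquery(**task_filters):
    """Correlated count of a workflow's tasks matching the given filters."""
    return Subquery(
        Task.objects.filter(workflows=OuterRef('pk'), **task_filters)
        .values('workflows__pk')   # group by the outer workflow
        .annotate(cnt=Count(1))
        .values('cnt'),
        output_field=IntegerField())

# e.g. n_tasks_open=task_count_subquery(state=Task.OPEN, assigned_to=None)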
Example no. 4
class RegrCount(StatAggregate):
    function = 'REGR_COUNT'
    output_field = IntegerField()

    def convert_value(self, value, expression, connection):
        return 0 if value is None else value
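A hedged usage sketch for the aggregate above: REGR_COUNT counts rows where both expressions are non-null, and the convert_value override turns an empty result into 0 rather than None. The Measurement model and its fields are assumptions:

# Hypothetical model: Measurement(x=FloatField(), y=FloatField(null=True))
pair_count = Measurement.objects.aggregate(pairs=RegrCount(y='y', x='x'))['pairs']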
Example no. 5
class Job(Model):
    jobid = CharField(
        null=False, max_length=32,
        blank=True, default="", db_index=True
    )
    jobname = CharField(null=False, max_length=128, blank=True, default="")
    queue = CharField(
        null=False, max_length=128,
        blank=True, default=""
    )
    qtime = IntegerField(null=False, blank=True, default=0, db_index=True)
    starttime = IntegerField(null=False, blank=True, default=0)
    endtime = IntegerField(null=False, blank=True, default=0)
    submiter = CharField(
        null=False, max_length=32,
        blank=True, default=""
    )
    submit_args = CharField(null=False, max_length=260, blank=True, default="")
    jobstatus = CharField(
        null=False, max_length=12,
        blank=True, default=""
    )
    walltime = CharField(null=False, max_length=24, blank=True, default="")
    nodescount = IntegerField(null=False, blank=True, default=0)
    cpuscount = IntegerField(null=False, blank=True, default=0)
    priority = CharField(null=False, max_length=12, blank=True, default="")
    exechosts = CharField(null=False, max_length=512, blank=True, default="")
    type = CharField(null=False, max_length=24, blank=True, default="")
    jobfilename = CharField(null=False, max_length=260, blank=True, default="")
    workingdir = CharField(null=False, max_length=260, blank=True, default="")
    mail = CharField(null=False, max_length=260, blank=True, default="")
    mailtrigger = CharField(null=False, max_length=12, blank=True, default="")
    mpi_prog = CharField(null=False, max_length=260, blank=True, default="")
    mpi_prog_arguments = CharField(
        null=False, max_length=64, blank=True, default=""
    )
    pnodescount = IntegerField(null=False, blank=True, default=0)
    ppn = IntegerField(null=False, blank=True, default=0)
    pmem = IntegerField(null=False, blank=True, default=0)
    status = CharField(
        null=False, max_length=24,
        blank=True, default=""
    )
    isdeleted = BooleanField(null=False, blank=True, default=False)
    operatestatus = CharField(
        null=False, max_length=24, blank=True, default=""
    )
    aioperatestatus = CharField(
        null=False, max_length=24, blank=True, default=""
    )
    charge = IntegerField(null=False, blank=True, default=0)
    wallduration = IntegerField(null=False, blank=True, default=0)
    billgroup = CharField(null=False, max_length=32, blank=True, default="")
    workspace = CharField(null=False, max_length=260, blank=True, default="")
    json_body = TextField(null=False, blank=True, default="")
    gpuscount = IntegerField(null=False, blank=True, default=0)
    gpucharge = IntegerField(null=False, blank=True, default=0)
    gpusexechosts = CharField(
        null=False, max_length=512, blank=True, default=""
    )
    resumejobid = CharField(null=False, max_length=32, blank=True, default="")
    errfile = TextField(null=False, blank=True, default="")
    outfile = TextField(null=False, blank=True, default="")

    def get_absolute_url(self):
        return "/jobs/%i" % self.id

    def as_dict(self, **karg):

        import datetime
        import time

        def timeToUTC(times):
            return str(datetime.datetime.utcfromtimestamp(times))

        fields = karg.get('fields', [])
        exclude = karg.get('exclude', [])

        if karg.get('overview') is True:
            fields = [
                'id', "jobid", "jobname", "queue", "status", "jobstatus",
                "qtime", "starttime", "endtime", "nodescount", "cpuscount",
                "workingdir", "type", "jobfilename", "submit_args", "charge",
                "submiter", "walltime", "priority", "exechosts", "mail",
                "mailtrigger", "mpi_prog", "mpi_prog_arguments", "pnodescount",
                "ppn", "pmem", "operatestatus", "aioperatestatus",
                "wallduration", "billgroup", "workspace"
            ]
        data = model_to_dict(self, fields=fields, exclude=exclude)

        # data["url"] = self.get_absolute_url()
        data["currtime"] = timeToUTC(int(time.time()))
        data["timezone"] = time.timezone
        data["starttime"] = timeToUTC(data["starttime"])
        data["endtime"] = timeToUTC(data["endtime"])
        data["qtime"] = timeToUTC(data["qtime"])

        return data

    def save(self, *args, **kwargs):

        if not self.id:
            job_status_mapping = {
                "c": "completed",
                "r": "running",
                "q": "queueing",
                "s": "suspending",
                "w": "waiting",
                "h": "holding"
            }

            op_status_mapping = {
                "cancelling": "cancelling",
                "creating": "creating",
                "cancelled": "cancelled",
                "createfailed": "createfailed"
            }

            status_key = self.jobstatus.lower()
            if status_key in job_status_mapping:
                self.status = job_status_mapping[status_key]
            if self.operatestatus in op_status_mapping:
                self.status = op_status_mapping[self.operatestatus]

        super(Job, self).save(*args, **kwargs)
Example no. 6
    def get_queryset(self, pk=None):
        """All videos except for null ones."""
        queryset = Video.objects.filter(is_unlisted=False).values()
        request = self.request

        fields = [x.name for x in Video._meta.fields]
        for f in VIDEO_FIELDS:
            fields.remove(f)

        def get_score_annotation(user_preferences_vector):
            """Returns an sql object annotating queries with the video ratings (sclar product)."""
            return sum([
                F(f) * v for f, v in zip(VIDEO_FIELDS, user_preferences_vector)
            ])

        features = self.get_features_from_request()
        search_username = self.need_scores_for_username()

        # computing score inside the database
        if search_username:
            queryset = queryset.values(*fields)
            queryset = queryset.annotate(
                **{key: F(f'videorating__{key}')
                   for key in VIDEO_FIELDS},
                user=F('videorating__user__user__username')).filter(
                    user=search_username)

            # for myself, allow showing public/non-public videos
            if search_username == request.user.username:
                is_public = request.query_params.get('show_all_my_videos',
                                                     'true') == 'false'
                print(is_public)
            else:  # for other people, only show public videos
                is_public = True

            # keeping only public videos
            if is_public:
                queryset = VideoRatingPrivacy._annotate_privacy(
                    queryset,
                    prefix='videoratingprivacy',
                    field_user=None,
                    filter_add={
                        'videoratingprivacy__user__user__username':
                        search_username
                    })
                queryset = queryset.filter(_is_public=True)

            queryset = queryset.annotate(
                rating_n_experts=Value(1, IntegerField()))

            q1 = Q(expertrating_video_1__user__user__username=search_username)
            q2 = Q(expertrating_video_2__user__user__username=search_username)

            c1 = Count('expertrating_video_1', q1, distinct=True)
            c2 = Count('expertrating_video_2', q2, distinct=True)

            queryset = queryset.annotate(rating_n_ratings=c1 + c2)

            # logging model usage in search
            if self.request.user.is_authenticated:
                RepresentativeModelUsage.objects.get_or_create(
                    viewer=UserPreferences.objects.get(
                        user__username=self.request.user.username),
                    model=UserPreferences.objects.get(
                        user__username=search_username))

        queryset = queryset.annotate(
            score_preferences_term=get_score_annotation(features))

        queryset = queryset.annotate(
            score_search_term_=Value(0.0, FloatField()))

        if request.query_params.get('search'):
            # computing the postgres score for search
            if connection.vendor.startswith('postgres'):
                s_query = request.query_params.get('search', '')

                def word_to_query(w):
                    """Convert one word into a query."""
                    queries = []

                    queries.append(SearchQuery(w, search_type='raw'))
                    queries.append(SearchQuery(w + ':*', search_type='raw'))

                    return reduce(lambda x, y: x | y, queries)

                def words_to_query(s_query, max_len=100, max_word_len=20):
                    """Convert a string with words into a SearchQuery."""
                    s_query = s_query[:max_len]
                    s_query = s_query.split(' ')
                    s_query = [
                        ''.join(filter(str.isalnum, x)) for x in s_query
                    ]
                    s_query = [
                        x for x in s_query if 1 <= len(x) <= max_word_len
                    ]
                    s_query = [word_to_query(x) for x in s_query]
                    if not s_query:
                        return SearchQuery('')
                    return reduce(lambda x, y: x & y, s_query)

                s_query = words_to_query(s_query)

                s_vectors = [
                    SearchVector(f, weight=w)
                    for f, w in zip(self.search_fields, self.search_weights)
                ]
                s_vector = reduce(lambda x, y: x + y, s_vectors)

                queryset = queryset.annotate(
                    score_search_term_=SearchRank(s_vector, s_query))
            else:
                # in other databases, using basic filtering
                queryset = filters_.SearchFilter().filter_queryset(
                    self.request, queryset, self)
                queryset = queryset.annotate(
                    score_search_term_=Value(1.0, FloatField()))

        queryset = queryset.annotate(
            score_search_term=F('score_search_term_') *
            VideoSearchEngine.VIDEO_SEARCH_COEFF)
        queryset = queryset.annotate(score=F('score_preferences_term') +
                                     F('score_search_term'))

        return queryset
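For reference, the prefix-matching trick inside word_to_query in isolation; this sketch assumes PostgreSQL full-text search and a Django version that supports search_type='raw':

from django.contrib.postgres.search import SearchQuery

word = 'dja'
# Match either the exact lexeme or any lexeme starting with it ('dja:*').
query = (SearchQuery(word, search_type='raw')
         | SearchQuery(word + ':*', search_type='raw'))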
Example no. 7
class EquipmentInstanceAlertPeriod(Model):
    """Equipment Instance Alert Period."""

    RELATED_NAME = 'equipment_instance_alert_periods'
    RELATED_QUERY_NAME = 'equipment_instance_alert_period'

    equipment_unique_type_group = \
        ForeignKey(
            to=EquipmentUniqueTypeGroup,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    equipment_instance = \
        ForeignKey(
            to=EquipmentInstance,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    risk_score_name = \
        CharField(
            max_length=MAX_CHAR_LEN,
            blank=False,
            null=False,
            unique=False,
            db_index=True)

    threshold = \
        FloatField(
            blank=False,
            null=False,
            default=0,
            db_index=True)

    from_date = \
        DateField(
            blank=False,
            null=False,
            auto_now=False,
            auto_created=False,
            default=None,
            db_index=True)

    to_date = \
        DateField(
            blank=False,
            null=False,
            auto_now=False,
            auto_created=False,
            default=None,
            db_index=True)

    date_range = \
        DateRangeField(
            blank=True,
            null=True)

    duration = \
        IntegerField(
            blank=False,
            null=False,
            default=0)

    cumulative_excess_risk_score = \
        FloatField(
            blank=False,
            null=False,
            default=0)

    approx_average_risk_score = \
        FloatField(
            blank=False,
            null=False,
            default=0)

    last_risk_score = \
        FloatField(
            blank=False,
            null=False,
            default=0)

    ongoing = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    info = \
        JSONField(
            blank=True,
            null=True,
            default=dict)

    diagnosis_status = \
        ForeignKey(
            to=AlertDiagnosisStatus,
            blank=True,
            null=True,
            on_delete=PROTECT)

    equipment_instance_alarm_periods = \
        ManyToManyField(
            to=EquipmentInstanceAlarmPeriod,
            through=(EquipmentInstanceAlarmPeriod
                     .equipment_instance_alert_periods.through),
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    has_associated_equipment_instance_alarm_periods = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    equipment_instance_problem_diagnoses = \
        ManyToManyField(
            to=EquipmentInstanceProblemDiagnosis,
            through=(EquipmentInstanceProblemDiagnosis
                     .equipment_instance_alert_periods.through),
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    has_associated_equipment_instance_problem_diagnoses = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Instance Alert Period'
        verbose_name_plural = 'Equipment Instance Alert Periods'

        unique_together = \
            ('equipment_unique_type_group',
             'equipment_instance',
             'risk_score_name',
             'threshold',
             'from_date'), \
            ('equipment_unique_type_group',
             'equipment_instance',
             'risk_score_name',
             'threshold',
             'to_date')

        ordering = \
            'diagnosis_status', \
            '-ongoing', \
            'risk_score_name', \
            '-threshold', \
            '-cumulative_excess_risk_score'

    def __str__(self):
        """Return string repr."""
        if self.diagnosis_status is None:
            self.save()

        return (f'{self.diagnosis_status.name.upper()}: ' + (
            'ONGOING ' if self.ongoing else ''
        ) + 'Alert on ' + (
            f'{self.equipment_unique_type_group.equipment_general_type.name.upper()} '  # noqa: E501
            f'{self.equipment_unique_type_group.name} '
            f'#{self.equipment_instance.name} '
            f'from {self.from_date} to {self.to_date} '
            f'w Approx Avg Risk Score {self.approx_average_risk_score:,.1f} '
            f'(Last: {self.last_risk_score:,.1f}) '
            f'(based on {self.risk_score_name} > {self.threshold}) '
            f'for {self.duration:,} Day(s)'))

    def save(self, *args, **kwargs):
        """Save."""
        self.date_range = \
            DateRange(
                lower=self.from_date,
                upper=self.to_date,
                bounds='[]',
                empty=False)

        self.duration = duration = \
            (self.to_date - self.from_date).days + 1

        self.approx_average_risk_score = \
            self.threshold + \
            (self.cumulative_excess_risk_score / duration)

        if self.diagnosis_status is None:
            self.diagnosis_status = \
                AlertDiagnosisStatus.objects.get_or_create(index=0)[0]

        super().save(*args, **kwargs)
Example no. 8
def exclude_records_by_age_for_column(exclude_config, column):
    return Case(When(~Q(**exclude_config), then=F(column)),
                default=0,
                output_field=IntegerField())
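A hedged usage sketch for the helper above: wrapped in Sum(), rows matching exclude_config contribute 0 instead of their column value. The model and field names are illustrative assumptions:

from django.db.models import Sum

# Hypothetical model: CaseRecord(age_in_days=IntegerField(), open_count=IntegerField())
totals = CaseRecord.objects.aggregate(
    open_total=Sum(
        exclude_records_by_age_for_column({'age_in_days__gt': 30}, 'open_count')))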
Example no. 9
    def with_month_year(self):
        return self.annotate(month=Func(
            F("created_on"), function="month",
            output_field=IntegerField())).annotate(year=Func(
                F("created_on"), function="year", output_field=IntegerField()))
Example no. 10
def addReview(request,product_id):
    review = []
    r_token = request.META['HTTP_AUTHORIZATION']
    new_token = r_token.split(' ', 1)[1]
    token = Token.objects.filter(key=new_token).first()
    buyer = Buyer.objects.filter(user_ptr_id=token.user.id).first()
    product = Product.objects.filter(id = product_id).first()
    comment = request.data.get("comments")
    ratings = request.data.get("ratings")

    new_review = Review.objects.create(
        buyer = buyer,
        product = product,
        comments = comment,
        ratings = int(ratings),
    )

    reviews = Review.objects.filter(product = product_id)

    for order in reviews:
        buyers = (
            Buyer.objects.filter(id=order.buyer.id)
            .annotate(
                ratings=Sum(order.ratings, output_field=IntegerField()),
                comments=Value(order.comments, output_field=CharField()),
                created_at=Value(order.created_at, output_field=DateTimeField()),
            )
            .first()
        )
        review.append(buyers)

    data = ReviewSerializer(review, many=True)

    return Response(data.data, status=status.HTTP_200_OK)
Example no. 11
def getBuyerOrders(request):
    # print(request.META['HTTP_AUTHORIZATION'])
    products = []
    r_token = request.META['HTTP_AUTHORIZATION']
    new_token = r_token.split(' ', 1)[1]
    token = Token.objects.filter(key=new_token).first()
    buyer = Buyer.objects.filter(user_ptr_id=token.user.id).first()
    if not buyer:
        return Response(
            data={
                "Message": "You are not logged in"
            },
            status=status.HTTP_400_BAD_REQUEST
        )
    wishlistProducts = Wishlist.objects.filter(buyer=buyer)
    for order in wishlistProducts:
        allwishlistproducts = (
            Product.objects.filter(id=order.product.id)
            .annotate(wishlist_id=Sum(order.id, output_field=IntegerField()))
            .first()
        )
        products.append(allwishlistproducts)
    data = CustomWishlistSerializer(products, many=True)    
    context = {
        'data': data.data
    }

    return Response(data.data,status=status.HTTP_200_OK)
Example no. 12
def getReview(request, product_id):
    review = []
    reviews = Review.objects.filter(product = product_id)
    for order in reviews:
        buyers = (
            Buyer.objects.filter(id=order.buyer.id)
            .annotate(
                ratings=Sum(order.ratings, output_field=IntegerField()),
                comments=Value(order.comments, output_field=CharField()),
                created_at=Value(order.created_at, output_field=DateTimeField()),
            )
            .first()
        )
        review.append(buyers)

    data = ReviewSerializer(review, many=True)

    return Response(data.data, status=status.HTTP_200_OK)
Example no. 13
def getvariantoptions(request, product_id):
    variants = []
    variant_option = Product_Variant_Options.objects.filter(product = product_id)
    for order in variant_option:
        parent_variant = Variant_Option.objects.filter(id=order.variant_options.id)

        for order2 in parent_variant:
            variant = (
                Variant.objects.filter(id=order2.variant.id)
                .annotate(
                    variant_option_id=Sum(order2.id, output_field=IntegerField()),
                    variant_option_name=Value(order2.name, output_field=CharField()),
                )
                .first()
            )
            variants.append(variant)
    data = CustomVariantsSerializer(variants, many=True)    
    context = {
        'data': data.data
    }

    return Response(data.data,status=status.HTTP_200_OK)
Example no. 14
def addtowishlist(request, product_id):
    # print(request.META['HTTP_AUTHORIZATION'])
    products = []
    r_token = request.META['HTTP_AUTHORIZATION']
    new_token = r_token.split(' ', 1)[1]
    product = Product.objects.filter(id=product_id).first()
    token = Token.objects.filter(key=new_token).first()
    buyer = Buyer.objects.filter(user_ptr_id=token.user.id).first()
    if not product:
        return Response(
            data={
                "Message": "Product does not exist"
            },
            status=status.HTTP_400_BAD_REQUEST
        )
    if Wishlist.objects.filter(buyer = buyer, product = product).exists():
        return Response(
            data={
                "Message": "Product already in wishlist"
            },
            status=status.HTTP_400_BAD_REQUEST
        )
    new_wishlistProduct = Wishlist.objects.create(
        product=product,
        buyer=buyer,
    )

    productID = Wishlist.objects.filter(buyer=buyer)
    for order in productID:
        Wishlist_product = (
            Product.objects.filter(id=order.product.id)
            .annotate(wishlist_id=Sum(order.id, output_field=IntegerField()))
            .first()
        )
        products.append(Wishlist_product)
    data = CustomWishlistSerializer(products, many=True)    
    context = {
        'data': data.data
    }
    return Response(data.data,status=status.HTTP_201_CREATED)
Example no. 15
class UsedItem(SoftDeletionModel):
    item = ForeignKey(Item, on_delete=models.CASCADE)
    quantity = IntegerField()
Example no. 16
class Interaction(LastModifiedModel):
    class Synergy(IntegerChoices):

        UNKNOWN = (0, "Inconnue")
        NEUTRAL = (1, "Neutre")
        ADDITIVE = (5, "Addition")
        DECREASE = (2, "Atténuation")
        INCREASE = (3, "Potentialisation")
        MIXED = (4, "Mixte")

    class Risk(IntegerChoices):

        UNKNOWN = (0, "Inconnu")
        NEUTRAL = (1, "Neutre")
        CAUTION = (2, "Vigilance")
        UNSAFE = (3, "Risqué")
        DANGEROUS = (4, "Dangereux")

    class Reliability(IntegerChoices):

        UNKNOWN = (0, "Non évaluée")
        HYPOTHETICAL = (1, "Théorique")
        INFERRED = (2, "Supposée")
        PROVEN = (3, "Avérée")

    from_drug = ForeignKey('Drug',
                           CASCADE,
                           related_name='interactions_from',
                           verbose_name="première substance")
    to_drug = ForeignKey('Drug',
                         CASCADE,
                         related_name='interactions_to',
                         verbose_name="seconde substance")
    risk = IntegerField(choices=Risk.choices,
                        default=Risk.UNKNOWN,
                        verbose_name="risques")
    synergy = IntegerField(choices=Synergy.choices,
                           default=Synergy.UNKNOWN,
                           verbose_name="synergie")
    risk_reliability = IntegerField(choices=Reliability.choices,
                                    default=Reliability.UNKNOWN,
                                    verbose_name="fiabilité des risques")
    effects_reliability = IntegerField(
        choices=Reliability.choices,
        default=Reliability.UNKNOWN,
        verbose_name="fiabilité de la synergie et des effets")
    risk_description = TextField(default='',
                                 blank=True,
                                 verbose_name="description des risques",
                                 help_text=markdown_allowed())
    effect_description = TextField(default='',
                                   blank=True,
                                   verbose_name="description des effets",
                                   help_text=markdown_allowed())
    notes = TextField(
        default='', blank=True,
        verbose_name="notes",
        help_text="Ce champ n'est visible que sur ce site " \
            "d'administration et est partagé entre tous les " \
            "utilisateurs."
    )
    is_draft = BooleanField(
        default=True,
        verbose_name="brouillon",
        help_text="En cas de travail en cours, de données incertaines" \
            " ou incomplètes."
    )

    history = HistoricalRecords()
    objects = InteractionManager()

    def __str__(self):
        return f"{self.from_drug.name} + {self.to_drug.name}"

    def get_absolute_url(self):
        return reverse(
            'combine',
            kwargs={'slugs': (self.from_drug.slug, self.to_drug.slug)})

    def other_interactant(self, drug):
        index = self.interactants.index(drug)
        return self.interactants[not index]

    def get_contrib_email_body(self):
        return render_to_string('drugcombinator/mail/contrib_body.txt',
                                {'interaction': self})

    @property
    def interactants(self):
        return (self.from_drug, self.to_drug)

    @interactants.setter
    def interactants(self, interactants):
        interactants = sorted(interactants, key=attrgetter('name'))
        self.from_drug, self.to_drug = interactants

    def sort_interactants(self):
        # The interactants property setter will handle interactants
        # reordering
        self.interactants = self.interactants

    def save(self, *args, **kwargs):
        self.sort_interactants()
        super().save(*args, **kwargs)

    @classmethod
    def get_dummy_risks(cls):
        return [cls(risk=risk) for risk in cls.Risk.values]

    @classmethod
    def get_dummy_synergies(cls):
        return [cls(synergy=synergy) for synergy in cls.Synergy.values]

    class Meta:
        constraints = (CheckConstraint(check=~Q(from_drug=F('to_drug')),
                                       name='interactants_inequals'),
                       UniqueConstraint(fields=('from_drug', 'to_drug'),
                                        name='interactants_unique_together'))
        verbose_name = "interaction"
Example no. 17
def get_other_units(unit):
    """Returns other units to show while translating."""
    result = {
        "total": 0,
        "skipped": False,
        "same": [],
        "matching": [],
        "context": [],
        "source": [],
    }

    allow_merge = False
    untranslated = False
    translation = unit.translation
    component = translation.component
    propagation = component.allow_translation_propagation
    same = None

    if unit.source and unit.context:
        match = Q(source=unit.source) & Q(context=unit.context)
        if component.has_template():
            query = Q(source=unit.source) | Q(context=unit.context)
        else:
            query = Q(source=unit.source)
    elif unit.source:
        match = Q(source=unit.source) & Q(context="")
        query = Q(source=unit.source)
    elif unit.context:
        match = Q(context=unit.context)
        query = Q(context=unit.context)
    else:
        return result

    units = (Unit.objects.filter(
        query,
        translation__component__project=component.project,
        translation__language=translation.language,
    ).annotate(matches_current=Case(
        When(condition=match, then=1), default=0,
        output_field=IntegerField())).order_by("-matches_current"))

    units_count = units.count()

    # Is it only this unit?
    if units_count == 1:
        return result

    result["total"] = units_count
    result["skipped"] = units_count > 20

    for item in units[:20]:
        item.allow_merge = item.differently_translated = (
            item.translated and item.target != unit.target)
        item.is_propagated = (
            propagation
            and item.translation.component.allow_translation_propagation
            and item.translation.plural_id == translation.plural_id
            and item.source == unit.source and item.context == unit.context)
        untranslated |= not item.translated
        allow_merge |= item.allow_merge
        if item.pk == unit.pk:
            same = item
            result["same"].append(item)
        elif item.source == unit.source and item.context == unit.context:
            result["matching"].append(item)
        elif item.source == unit.source:
            result["source"].append(item)
        elif item.context == unit.context:
            result["context"].append(item)

    # Slightly different logic to allow applying current translation to
    # the propagated strings
    if same is not None:
        same.allow_merge = ((untranslated or allow_merge) and same.translated
                            and propagation)
        allow_merge |= same.allow_merge

    result["total"] = sum(
        len(result[x]) for x in ("matching", "source", "context"))
    result["allow_merge"] = allow_merge

    return result
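The matches_current annotation above is a reusable ordering trick: a Case/When flag sorted descending pushes rows satisfying a condition to the front without excluding the rest. A minimal sketch with an illustrative condition:

from django.db.models import Case, IntegerField, Q, When

match = Q(source="hello") & Q(context="")
units = Unit.objects.annotate(
    matches_current=Case(When(condition=match, then=1),
                         default=0,
                         output_field=IntegerField()),
).order_by("-matches_current")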
Example no. 18
    def test_trunc_func(self):
        start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
        end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
        if settings.USE_TZ:
            start_datetime = timezone.make_aware(start_datetime, is_dst=False)
            end_datetime = timezone.make_aware(end_datetime, is_dst=False)
        self.create_model(start_datetime, end_datetime)
        self.create_model(end_datetime, start_datetime)

        msg = 'output_field must be either DateField, TimeField, or DateTimeField'
        with self.assertRaisesMessage(ValueError, msg):
            list(DTModel.objects.annotate(truncated=Trunc('start_datetime', 'year', output_field=IntegerField())))

        with self.assertRaisesMessage(AssertionError, "'name' isn't a DateField, TimeField, or DateTimeField."):
            list(DTModel.objects.annotate(truncated=Trunc('name', 'year', output_field=DateTimeField())))

        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=Trunc('start_date', 'second')))

        with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=Trunc('start_time', 'month')))

        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=Trunc('start_date', 'month', output_field=DateTimeField())))

        with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"):
            list(DTModel.objects.annotate(truncated=Trunc('start_time', 'second', output_field=DateTimeField())))

        def test_datetime_kind(kind):
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_datetime', kind, output_field=DateTimeField())
                ).order_by('start_datetime'),
                [
                    (truncate_to(start_datetime, kind)),
                    (truncate_to(end_datetime, kind))
                ],
                lambda m: (m.truncated)
            )

        def test_date_kind(kind):
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_date', kind, output_field=DateField())
                ).order_by('start_datetime'),
                [
                    (truncate_to(start_datetime.date(), kind)),
                    (truncate_to(end_datetime.date(), kind))
                ],
                lambda m: (m.truncated)
            )

        def test_time_kind(kind):
            self.assertQuerysetEqual(
                DTModel.objects.annotate(
                    truncated=Trunc('start_time', kind, output_field=TimeField())
                ).order_by('start_datetime'),
                [
                    (truncate_to(start_datetime.time(), kind)),
                    (truncate_to(end_datetime.time(), kind))
                ],
                lambda m: (m.truncated)
            )

        test_date_kind('year')
        test_date_kind('month')
        test_date_kind('day')
        test_time_kind('hour')
        test_time_kind('minute')
        test_time_kind('second')
        test_datetime_kind('year')
        test_datetime_kind('month')
        test_datetime_kind('day')
        test_datetime_kind('hour')
        test_datetime_kind('minute')
        test_datetime_kind('second')

        qs = DTModel.objects.filter(start_datetime__date=Trunc('start_datetime', 'day', output_field=DateField()))
        self.assertEqual(qs.count(), 2)
Example no. 19
class EquipmentInstanceProblemDiagnosis(Model):
    """Equipment Instance Problem Diagnosis."""

    RELATED_NAME = 'equipment_instance_problem_diagnoses'
    RELATED_QUERY_NAME = 'equipment_instance_problem_diagnosis'

    equipment_instance = \
        ForeignKey(
            to=EquipmentInstance,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    from_date = \
        DateField(
            blank=False,
            null=False,
            db_index=True)

    to_date = \
        DateField(
            blank=True,
            null=True,
            db_index=True)

    date_range = \
        DateRangeField(
            blank=True,
            null=True)

    duration = \
        IntegerField(
            blank=True,
            null=True)

    equipment_problem_types = \
        ManyToManyField(
            to=EquipmentProblemType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    has_equipment_problems = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    dismissed = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    comments = \
        TextField(
            blank=True,
            null=True)

    equipment_instance_alarm_periods = \
        ManyToManyField(
            to=EquipmentInstanceAlarmPeriod,
            through=(EquipmentInstanceAlarmPeriod
                     .equipment_instance_problem_diagnoses.through),
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    has_associated_equipment_instance_alarm_periods = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    equipment_instance_alert_periods = \
        ManyToManyField(
            to='EquipmentInstanceAlertPeriod',
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    has_associated_equipment_instance_alert_periods = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Instance Problem Diagnosis'
        verbose_name_plural = 'Equipment Instance Problem Diagnoses'

        unique_together = 'equipment_instance', 'from_date'

        ordering = 'dismissed', '-to_date', 'from_date'

    def __str__(self):
        """Return string repr."""
        return (f'{self.equipment_instance} from {self.from_date} ' +
                (f'to {self.to_date}' if self.to_date else '(ONGOING)') + (
                    ': {}'.format(  # pylint: disable=consider-using-f-string
                        ', '.join(equipment_problem_type.name.upper()
                                  for equipment_problem_type in
                                  self.equipment_problem_types.all()))
                    if self.equipment_problem_types.count() else '') +
                (' (DISMISSED)' if self.dismissed else ''))

    def save(self, *args, **kwargs):
        """Save."""
        self.date_range = \
            DateRange(
                lower=self.from_date,
                upper=self.to_date,
                bounds='[]',
                empty=False)

        self.duration = \
            (self.to_date - self.from_date).days + 1 \
            if self.to_date \
            else None

        super().save(*args, **kwargs)
Example no. 20
def get_grouped_items(event,
                      subevent=None,
                      voucher=None,
                      channel='web',
                      require_seat=0,
                      base_qs=None,
                      allow_addons=False,
                      quota_cache=None,
                      filter_items=None,
                      filter_categories=None):
    base_qs_set = base_qs is not None
    base_qs = base_qs if base_qs is not None else event.items

    requires_seat = Exists(
        SeatCategoryMapping.objects.filter(product_id=OuterRef('pk'),
                                           subevent=subevent))
    if not event.settings.seating_choice:
        requires_seat = Value(0, output_field=IntegerField())

    items = base_qs.using(settings.DATABASE_REPLICA).filter_available(
        channel=channel, voucher=voucher,
        allow_addons=allow_addons).select_related(
            'category',
            'tax_rule',  # for re-grouping
            'hidden_if_available',
        ).prefetch_related(
            Prefetch('quotas',
                     to_attr='_subevent_quotas',
                     queryset=event.quotas.using(
                         settings.DATABASE_REPLICA).filter(subevent=subevent)),
            Prefetch(
                'bundles',
                queryset=ItemBundle.objects.using(
                    settings.DATABASE_REPLICA).prefetch_related(
                        Prefetch('bundled_item',
                                 queryset=event.items.using(
                                     settings.DATABASE_REPLICA).
                                 select_related('tax_rule').prefetch_related(
                                     Prefetch(
                                         'quotas',
                                         to_attr='_subevent_quotas',
                                         queryset=event.quotas.using(
                                             settings.DATABASE_REPLICA).filter(
                                                 subevent=subevent)), )),
                        Prefetch(
                            'bundled_variation',
                            queryset=ItemVariation.objects.using(
                                settings.DATABASE_REPLICA).select_related(
                                    'item', 'item__tax_rule').
                            filter(item__event=event).prefetch_related(
                                Prefetch('quotas',
                                         to_attr='_subevent_quotas',
                                         queryset=event.quotas.using(
                                             settings.DATABASE_REPLICA).filter(
                                                 subevent=subevent)), )),
                    )),
            Prefetch('variations',
                     to_attr='available_variations',
                     queryset=ItemVariation.objects.using(
                         settings.DATABASE_REPLICA).annotate(
                             subevent_disabled=Exists(
                                 SubEventItemVariation.objects.filter(
                                     variation_id=OuterRef('pk'),
                                     subevent=subevent,
                                     disabled=True,
                                 )), ).filter(active=True,
                                              quotas__isnull=False,
                                              subevent_disabled=False).
                     prefetch_related(
                         Prefetch('quotas',
                                  to_attr='_subevent_quotas',
                                  queryset=event.quotas.using(
                                      settings.DATABASE_REPLICA).filter(
                                          subevent=subevent))).distinct()),
        ).annotate(
            quotac=Count('quotas'),
            has_variations=Count('variations'),
            subevent_disabled=Exists(
                SubEventItem.objects.filter(
                    item_id=OuterRef('pk'),
                    subevent=subevent,
                    disabled=True,
                )),
            requires_seat=requires_seat,
        ).filter(
            quotac__gt=0,
            subevent_disabled=False,
        ).order_by('category__position', 'category_id', 'position', 'name')
    if require_seat:
        items = items.filter(requires_seat__gt=0)
    else:
        items = items.filter(requires_seat=0)

    if filter_items:
        items = items.filter(pk__in=[a for a in filter_items if a.isdigit()])
    if filter_categories:
        items = items.filter(
            category_id__in=[a for a in filter_categories if a.isdigit()])

    display_add_to_cart = False
    quota_cache_key = f'item_quota_cache:{subevent.id if subevent else 0}:{channel}:{bool(require_seat)}'
    quota_cache = quota_cache or event.cache.get(quota_cache_key) or {}
    quota_cache_existed = bool(quota_cache)

    if subevent:
        item_price_override = subevent.item_price_overrides
        var_price_override = subevent.var_price_overrides
    else:
        item_price_override = {}
        var_price_override = {}

    restrict_vars = set()
    if voucher and voucher.quota_id:
        # If a voucher is set to a specific quota, we need to filter out on that level
        restrict_vars = set(voucher.quota.variations.all())

    quotas_to_compute = []
    for item in items:
        if item.has_variations:
            for v in item.available_variations:
                for q in v._subevent_quotas:
                    if q.pk not in quota_cache:
                        quotas_to_compute.append(q)
        else:
            for q in item._subevent_quotas:
                if q.pk not in quota_cache:
                    quotas_to_compute.append(q)

    if quotas_to_compute:
        qa = QuotaAvailability()
        qa.queue(*quotas_to_compute)
        qa.compute()
        quota_cache.update({q.pk: r for q, r in qa.results.items()})

    for item in items:
        if voucher and voucher.item_id and voucher.variation_id:
            # Restrict variations if the voucher only allows one
            item.available_variations = [
                v for v in item.available_variations
                if v.pk == voucher.variation_id
            ]

        if get_all_sales_channels()[channel].unlimited_items_per_order:
            max_per_order = sys.maxsize
        else:
            max_per_order = item.max_per_order or int(
                event.settings.max_items_per_order)

        if item.hidden_if_available:
            q = item.hidden_if_available.availability(_cache=quota_cache)
            if q[0] == Quota.AVAILABILITY_OK:
                item._remove = True
                continue

        item.description = str(item.description)
        for recv, resp in item_description.send(sender=event,
                                                item=item,
                                                variation=None):
            if resp:
                item.description += ("<br/>"
                                     if item.description else "") + resp

        if not item.has_variations:
            item._remove = False
            if not bool(item._subevent_quotas):
                item._remove = True
                continue

            if voucher and (voucher.allow_ignore_quota or voucher.block_quota):
                item.cached_availability = (Quota.AVAILABILITY_OK,
                                            voucher.max_usages -
                                            voucher.redeemed)
            else:
                item.cached_availability = list(
                    item.check_quotas(subevent=subevent,
                                      _cache=quota_cache,
                                      include_bundled=True))

            if event.settings.hide_sold_out and item.cached_availability[
                    0] < Quota.AVAILABILITY_RESERVED:
                item._remove = True
                continue

            item.order_max = min(
                item.cached_availability[1] if item.cached_availability[1]
                is not None else sys.maxsize, max_per_order)

            original_price = item_price_override.get(item.pk,
                                                     item.default_price)
            if voucher:
                price = voucher.calculate_price(original_price)
            else:
                price = original_price

            item.display_price = item.tax(price,
                                          currency=event.currency,
                                          include_bundled=True)

            if price != original_price:
                item.original_price = item.tax(original_price,
                                               currency=event.currency,
                                               include_bundled=True)
            else:
                item.original_price = (
                    item.tax(item.original_price,
                             currency=event.currency,
                             include_bundled=True,
                             base_price_is='net'
                             if event.settings.display_net_prices else
                             'gross')  # backwards-compat
                    if item.original_price else None)

            display_add_to_cart = display_add_to_cart or item.order_max > 0
        else:
            for var in item.available_variations:
                var.description = str(var.description)
                for recv, resp in item_description.send(sender=event,
                                                        item=item,
                                                        variation=var):
                    if resp:
                        var.description += ("<br/>"
                                            if var.description else "") + resp

                if voucher and (voucher.allow_ignore_quota
                                or voucher.block_quota):
                    var.cached_availability = (Quota.AVAILABILITY_OK,
                                               voucher.max_usages -
                                               voucher.redeemed)
                else:
                    var.cached_availability = list(
                        var.check_quotas(subevent=subevent,
                                         _cache=quota_cache,
                                         include_bundled=True))

                var.order_max = min(
                    var.cached_availability[1] if var.cached_availability[1]
                    is not None else sys.maxsize, max_per_order)

                original_price = var_price_override.get(var.pk, var.price)
                if voucher:
                    price = voucher.calculate_price(original_price)
                else:
                    price = original_price

                var.display_price = var.tax(price,
                                            currency=event.currency,
                                            include_bundled=True)

                if price != original_price:
                    var.original_price = var.tax(original_price,
                                                 currency=event.currency,
                                                 include_bundled=True)
                else:
                    var.original_price = (
                        var.tax(var.original_price or item.original_price,
                                currency=event.currency,
                                include_bundled=True,
                                base_price_is='net'
                                if event.settings.display_net_prices else
                                'gross')  # backwards-compat
                    ) if var.original_price or item.original_price else None

                display_add_to_cart = display_add_to_cart or var.order_max > 0

            item.original_price = (
                item.tax(
                    item.original_price,
                    currency=event.currency,
                    include_bundled=True,
                    base_price_is='net' if event.settings.display_net_prices
                    else 'gross')  # backwards-compat
                if item.original_price else None)

            item.available_variations = [
                v for v in item.available_variations
                if v._subevent_quotas and (
                    not voucher or not voucher.quota_id or v in restrict_vars)
            ]

            if event.settings.hide_sold_out:
                item.available_variations = [
                    v for v in item.available_variations
                    if v.cached_availability[0] >= Quota.AVAILABILITY_RESERVED
                ]

            if voucher and voucher.variation_id:
                item.available_variations = [
                    v for v in item.available_variations
                    if v.pk == voucher.variation_id
                ]

            if len(item.available_variations) > 0:
                item.min_price = min([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])
                item.max_price = max([
                    v.display_price.net if event.settings.display_net_prices
                    else v.display_price.gross
                    for v in item.available_variations
                ])

            item._remove = not bool(item.available_variations)

    if not quota_cache_existed and not voucher and not allow_addons and not base_qs_set and not filter_items and not filter_categories:
        event.cache.set(quota_cache_key, quota_cache, 5)
    items = [
        item for item in items
        if (len(item.available_variations) > 0 or not item.has_variations)
        and not item._remove
    ]
    return items, display_add_to_cart
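One detail worth isolating from the function above: requires_seat is either an Exists() subquery or a constant Value(0, output_field=IntegerField()) when seating is disabled, so the later filter(requires_seat=...) calls work uniformly in both cases. A hedged sketch of just that branch (names as in the example):

from django.db.models import Exists, IntegerField, OuterRef, Value

if event.settings.seating_choice:
    requires_seat = Exists(
        SeatCategoryMapping.objects.filter(product_id=OuterRef('pk'),
                                           subevent=subevent))
else:
    # Constant 0 keeps the downstream requires_seat filters valid.
    requires_seat = Value(0, output_field=IntegerField())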
Example no. 21
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        if not (self.object.ended or self.can_edit):
            raise Http404()

        queryset = Submission.objects.filter(contest_object=self.object)

        ac_count = Count(
            Case(When(result='AC', then=Value(1)),
                 output_field=IntegerField()))
        ac_rate = CombinedExpression(ac_count / Count('problem'),
                                     '*',
                                     Value(100.0),
                                     output_field=FloatField())

        status_count_queryset = list(
            queryset.values('problem__code', 'result').annotate(
                count=Count('result')).values_list('problem__code', 'result',
                                                   'count'), )
        labels, codes = [], []
        contest_problems = self.object.contest_problems.order_by(
            'order').values_list('problem__name', 'problem__code')
        if contest_problems:
            labels, codes = zip(*contest_problems)
        num_problems = len(labels)
        status_counts = [[] for i in range(num_problems)]
        for problem_code, result, count in status_count_queryset:
            if problem_code in codes:
                status_counts[codes.index(problem_code)].append(
                    (result, count))

        result_data = defaultdict(partial(list, [0] * num_problems))
        for i in range(num_problems):
            for category in _get_result_data(defaultdict(
                    int, status_counts[i]))['categories']:
                result_data[category['code']][i] = category['count']

        stats = {
            'problem_status_count': {
                'labels':
                labels,
                'datasets': [{
                    'label':
                    name,
                    'backgroundColor':
                    settings.DMOJ_STATS_SUBMISSION_RESULT_COLORS[name],
                    'data':
                    data,
                } for name, data in result_data.items()],
            },
            'problem_ac_rate':
            get_bar_chart(
                queryset.values(
                    'contest__problem__order',
                    'problem__name').annotate(ac_rate=ac_rate).order_by(
                        'contest__problem__order').values_list(
                            'problem__name', 'ac_rate'), ),
            'language_count':
            get_pie_chart(
                queryset.values('language__name').annotate(
                    count=Count('language__name')).filter(
                        count__gt=0).order_by('-count').values_list(
                            'language__name', 'count'), ),
            'language_ac_rate':
            get_bar_chart(
                queryset.values('language__name').annotate(
                    ac_rate=ac_rate).filter(ac_rate__gt=0).values_list(
                        'language__name', 'ac_rate'), ),
        }

        context['stats'] = mark_safe(json.dumps(stats))

        return context
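The two aggregates above are the core of the stats: Count(Case(When(...))) counts only accepted submissions, and dividing by the per-group total (times 100) gives a percentage. A minimal standalone sketch of the same pattern, reusing the Submission model from this example but using ExpressionWrapper instead of building CombinedExpression directly (the contest filter is left out for brevity):

from django.db.models import (Case, Count, ExpressionWrapper, FloatField,
                              IntegerField, Value, When)

# Rows that do not match the When() yield NULL and are skipped by Count().
ac_count = Count(Case(When(result='AC', then=Value(1)),
                      output_field=IntegerField()))

# Percentage of accepted submissions within each group.
ac_rate = ExpressionWrapper(ac_count * 100.0 / Count('problem'),
                            output_field=FloatField())

per_problem = (Submission.objects
               .values('problem__code')
               .annotate(ac_rate=ac_rate)
               .order_by('problem__code'))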
Example No. 22
0
class ROILink(Model):
    user = ForeignKey(Profile, on_delete=CASCADE)  # on_delete is required on Django 2.0+
    link = URLField()
    label = CharField(max_length=256)
    uuid = UUIDField(default=uuid4)
    visits = IntegerField(default=0)
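Not part of the original snippet, but a common way a counter like visits is bumped atomically is an F() expression, so concurrent requests do not overwrite each other; link_uuid is a placeholder value:

from django.db.models import F

# Increment the counter in SQL rather than in Python.
ROILink.objects.filter(uuid=link_uuid).update(visits=F('visits') + 1)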
Example No. 23
0
class RunningJob(Model):
    job = ForeignKey('Job', blank=False, on_delete=PROTECT)
    node = ForeignKey(Node, blank=False, on_delete=PROTECT)
    core_num = IntegerField(null=False, blank=False)
    gpu_num = IntegerField(null=False, blank=False)
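A hedged sketch (not from the source project) of how such a table is typically aggregated, e.g. cores and GPUs in use per node:

from django.db.models import Sum

usage_per_node = (RunningJob.objects
                  .values('node')
                  .annotate(cores_in_use=Sum('core_num'),
                            gpus_in_use=Sum('gpu_num')))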
Example No. 24
0
class CourseProgress(Model):
    user = ForeignKey(User, CASCADE)
    course = ForeignKey(Course, CASCADE)
    percent_complete = IntegerField()
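Again only a sketch, not from the source project: average completion per course can be computed with an aggregate and an explicit output_field, in line with the other examples here:

from django.db.models import Avg, FloatField

avg_per_course = (CourseProgress.objects
                  .values('course')
                  .annotate(avg_complete=Avg('percent_complete',
                                             output_field=FloatField())))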
Example No. 25
0
class Script(CleanSave, TimestampedModel):

    # Force model into the metadataserver namespace.
    class Meta(DefaultMeta):
        pass

    objects = ScriptManager()

    name = CharField(max_length=255, unique=True)

    title = CharField(max_length=255, blank=True)

    description = TextField(blank=True)

    tags = ArrayField(TextField(), blank=True, null=True, default=list)

    script_type = IntegerField(choices=SCRIPT_TYPE_CHOICES,
                               default=SCRIPT_TYPE.TESTING)

    # The hardware the script configures or tests.
    hardware_type = IntegerField(choices=HARDWARE_TYPE_CHOICES,
                                 default=HARDWARE_TYPE.NODE)

    # Whether the script can run in parallel with other scripts.
    parallel = IntegerField(choices=SCRIPT_PARALLEL_CHOICES,
                            default=SCRIPT_PARALLEL.DISABLED)

    # Any results which will be made available after the script is run.
    results = JSONObjectField(blank=True, default={})

    # Parameters which may be passed to the script and their constraints.
    parameters = JSONObjectField(blank=True, default={})

    # apt, snap, or dpkg packages to install, or archives to extract.
    packages = JSONObjectField(blank=True, default={})

    # 0 is no timeout
    timeout = DurationField(default=datetime.timedelta())

    destructive = BooleanField(default=False)

    # True only if the script is shipped with MAAS
    default = BooleanField(default=False)

    script = OneToOneField(VersionedTextFile, on_delete=CASCADE)

    # A list of hardware identifiers (modalias, PCI id, USB id, or name) this
    # script is applicable to. This script will always run on machines with
    # matching hardware.
    for_hardware = ArrayField(CharField(max_length=255),
                              blank=True,
                              default=list)

    # Whether or not the script may reboot while running. Tells the status
    # monitor to wait until NODE_FAILURE_MONITORED_STATUS_TIMEOUTS before
    # timing out.
    may_reboot = BooleanField(default=False)

    # Only applicable to commissioning scripts. When true, commissioning
    # scripts are rerun after receiving the result.
    recommission = BooleanField(default=False)

    # Whether or not maas-run-remote-scripts should apply user configured
    # network settings before running the Script.
    apply_configured_networking = BooleanField(default=False)

    @property
    def ForHardware(self):
        """Parses the for_hardware field and returns a ForHardware tuple."""
        modaliases = []
        pci = []
        usb = []
        for descriptor in self.for_hardware:
            try:
                hwtype, value = descriptor.split(":", 1)
            except ValueError:
                continue
            if hwtype == "modalias":
                modaliases.append(value)
            elif hwtype == "pci":
                pci.append(value)
            elif hwtype == "usb":
                usb.append(value)
        return ForHardware(modaliases, pci, usb)

    @property
    def script_type_name(self):
        for script_type, script_type_name in SCRIPT_TYPE_CHOICES:
            if self.script_type == script_type:
                return script_type_name
        return "unknown"

    @property
    def hardware_type_name(self):
        return HARDWARE_TYPE_CHOICES[self.hardware_type][1]

    @property
    def parallel_name(self):
        return SCRIPT_PARALLEL_CHOICES[self.parallel][1]

    def __str__(self):
        return self.name

    def add_tag(self, tag):
        """Add tag to Script."""
        if tag not in self.tags:
            self.tags = self.tags + [tag]

    def remove_tag(self, tag):
        """Remove tag from Script."""
        if tag in self.tags:
            tags = self.tags.copy()
            tags.remove(tag)
            self.tags = tags

    def save(self, *args, **kwargs):
        if self.destructive:
            self.add_tag("destructive")
        else:
            self.remove_tag("destructive")

        for hw_type, hw_type_label in HARDWARE_TYPE_CHOICES:
            if hw_type == self.hardware_type:
                self.add_tag(hw_type_label.lower())
            else:
                self.remove_tag(hw_type_label.lower())

        return super().save(*args, **kwargs)
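A quick sketch of how the for_hardware descriptors above are parsed; the instance is unsaved and the descriptor values are illustrative only:

script = Script(name="storage-test",
                for_hardware=["modalias:pci:v00001234d*",
                              "pci:8086:1521",
                              "usb:174c:07d1",
                              "malformed-entry"])
modaliases, pci, usb = script.ForHardware
# modaliases == ["pci:v00001234d*"], pci == ["8086:1521"], usb == ["174c:07d1"];
# the entry without a "type:value" prefix is silently skipped.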
Example No. 26
0
    def __init__(self, expression, output_field=None, **extra):
        if output_field is None:
            output_field = IntegerField()
        super(ExtractDay, self).__init__(expression,
                                         output_field=output_field,
                                         **extra)
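For context, a minimal sketch of how a day-extraction expression with an IntegerField output is typically used in a query; the Event model and its start field are assumptions for illustration:

from django.db.models.functions import ExtractDay

# Annotate each row with the day-of-month of `start`, then filter on it.
first_of_month = Event.objects.annotate(day=ExtractDay('start')).filter(day=1)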
Example No. 27
0
    def output_field(self):
        return IntegerField()
Example No. 28
0
class BootResource(CleanSave, TimestampedModel):
    """Boot resource.

    Each `BootResource` represents a os/series combination or custom uploaded
    image that maps to a specific architecture that a node can use to
    commission or install.

    `BootResource` can have multiple `BootResourceSet` corresponding to
    different versions of this `BootResource`. When a node selects this
    `BootResource` the newest `BootResourceSet` is used to deploy to the node.

    :ivar rtype: Type of `BootResource`. See the vocabulary
        :class:`BOOT_RESOURCE_TYPE`.
    :ivar name: Name of the `BootResource`. If it is BOOT_RESOURCE_TYPE.UPLOADED
        then `name` is used to reference this image. If it is
        BOOT_RESOURCE_TYPE.SYNCED or BOOT_RESOURCE_TYPE.GENERATED then it is
        in the format of os/series.
    :ivar architecture: Architecture of the `BootResource`. It must be in
        the format arch/subarch.
    :ivar extra: Extra information about the file. This is only used
        for synced Ubuntu images.
    """
    class Meta(DefaultMeta):
        unique_together = (("name", "architecture"), )

    objects = BootResourceManager()

    rtype = IntegerField(choices=BOOT_RESOURCE_TYPE_CHOICES, editable=False)

    name = CharField(max_length=255, blank=False)

    architecture = CharField(max_length=255,
                             blank=False,
                             validators=[validate_architecture])

    bootloader_type = CharField(max_length=32, blank=True, null=True)

    kflavor = CharField(max_length=32, blank=True, null=True)

    # The hwe-rolling kernel is a meta-package which depends on the latest
    # kernel available. Instead of placing a duplicate kernel in the stream
    # SimpleStreams adds a boolean field to indicate that the hwe-rolling
    # kernel meta-package points to this kernel. When the rolling field is set
    # true MAAS allows users to deploy the hwe-rolling kernel by using this
    # BootResource kernel and instructs Curtin to install the meta-package.
    rolling = BooleanField(blank=False, null=False, default=False)

    extra = JSONObjectField(blank=True, default="", editable=False)

    def __str__(self):
        return "<BootResource name=%s, arch=%s, kflavor=%s>" % (
            self.name,
            self.architecture,
            self.kflavor,
        )

    @property
    def display_rtype(self):
        """Return rtype text as displayed to the user."""
        return BOOT_RESOURCE_TYPE_CHOICES_DICT[self.rtype]

    def clean(self):
        """Validate the model.

        Checks that the name is in a valid format, for its type.
        """
        if self.rtype == BOOT_RESOURCE_TYPE.UPLOADED:
            if "/" in self.name:
                os_name = self.name.split("/")[0]
                osystem = OperatingSystemRegistry.get_item(os_name)
                if osystem is None:
                    raise ValidationError(
                        "%s boot resource cannot contain a '/' in it's name "
                        "unless it starts with a supported operating system." %
                        (self.display_rtype))
        elif self.rtype in RTYPE_REQUIRING_OS_SERIES_NAME:
            if "/" not in self.name:
                raise ValidationError(
                    "%s boot resource must contain a '/' in it's name." %
                    (self.display_rtype))

    def unique_error_message(self, model_class, unique_check):
        if unique_check == ("name", "architecture"):
            return "Boot resource of name, and architecture already exists."
        return super().unique_error_message(model_class, unique_check)

    def get_latest_set(self):
        """Return latest `BootResourceSet`."""
        if (not hasattr(self, "_prefetched_objects_cache")
                or "sets" not in self._prefetched_objects_cache):
            return self.sets.order_by("id").last()
        elif self.sets.all():
            return sorted(self.sets.all(), key=attrgetter("id"),
                          reverse=True)[0]
        else:
            return None

    def get_latest_complete_set(self):
        """Return latest `BootResourceSet` where all `BootResouceFile`'s
        are complete."""
        if (not hasattr(self, "_prefetched_objects_cache")
                or "sets" not in self._prefetched_objects_cache):
            resource_sets = self.sets.order_by("-id").annotate(
                files_count=Count("files__id"),
                files_size=Sum("files__largefile__size"),
                files_total_size=Sum("files__largefile__total_size"),
            )
        else:
            resource_sets = sorted(self.sets.all(),
                                   key=attrgetter("id"),
                                   reverse=True)
        for resource_set in resource_sets:
            if (resource_set.files_count > 0 and resource_set.files_size
                    == resource_set.files_total_size):
                return resource_set
        return None

    def split_arch(self):
        return self.architecture.split("/")

    def get_next_version_name(self):
        """Return the version a `BootResourceSet` should use when adding to
        this resource.

        The version naming is specific to how the resource sets will be sorted
        by simplestreams. The version name is YYYYmmdd, with an optional
        revision index. (e.g. 20140822.1)

        This method gets the current date and checks whether a revision already
        exists in the database. If it doesn't, just the current date is
        returned. If it does, the next revision for that date is returned.

        :return: Name of version to use for a new set on this `BootResource`.
        :rtype: string
        """
        version_name = now().strftime("%Y%m%d")
        sets = self.sets.filter(
            version__startswith=version_name).order_by("version")
        if not sets.exists():
            return version_name
        max_idx = 0
        for resource_set in sets:
            if "." in resource_set.version:
                _, set_idx = resource_set.version.split(".")
                set_idx = int(set_idx)
                if set_idx > max_idx:
                    max_idx = set_idx
        return "%s.%d" % (version_name, max_idx + 1)

    def supports_subarch(self, subarch):
        """Return True if the resource supports the given subarch."""
        _, self_subarch = self.split_arch()
        if subarch == self_subarch:
            return True
        if "subarches" not in self.extra:
            return False
        subarches = self.extra["subarches"].split(",")
        return subarch in subarches
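A standalone sketch of the revision numbering that get_next_version_name() implements (not MAAS code; the date and version values are illustrative):

def next_version_name(existing_versions, today="20140822"):
    """Return today's date, or date.N for the next free revision."""
    todays = [v for v in existing_versions if v.startswith(today)]
    if not todays:
        return today
    max_idx = max((int(v.split(".")[1]) for v in todays if "." in v),
                  default=0)
    return "%s.%d" % (today, max_idx + 1)

next_version_name([])                          # -> "20140822"
next_version_name(["20140822"])                # -> "20140822.1"
next_version_name(["20140822", "20140822.1"])  # -> "20140822.2"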
Example No. 29
0
class Topic(BasePage):
    resource_type = "topic"
    parent_page_types = ["Topics"]
    subpage_types = ["Topic"]
    template = "topic.html"

    # Content fields
    description = RichTextField(
        blank=True,
        default="",
        features=RICH_TEXT_FEATURES_SIMPLE,
        help_text="Optional short text description, max. 400 characters",
        max_length=400,
    )
    featured = StreamField(
        StreamBlock(
            [
                (
                    "article",
                    PageChooserBlock(target_model=(
                        "articles.Article",
                        "externalcontent.ExternalArticle",
                    )),
                ),
                ("external_page", FeaturedExternalBlock()),
            ],
            max_num=4,
            required=False,
        ),
        null=True,
        blank=True,
        help_text="Optional space for featured articles, max. 4",
    )
    tabbed_panels = StreamField(
        StreamBlock([("panel", TabbedPanelBlock())], max_num=3,
                    required=False),
        null=True,
        blank=True,
        help_text=
        "Optional tabbed panels for linking out to other resources, max. 3",
        verbose_name="Tabbed panels",
    )
    latest_articles_count = IntegerField(
        choices=RESOURCE_COUNT_CHOICES,
        default=3,
        help_text="The number of articles to display for this topic.",
    )

    # Card fields
    card_title = CharField("Title", max_length=140, blank=True, default="")
    card_description = TextField("Description",
                                 max_length=400,
                                 blank=True,
                                 default="")
    card_image = ForeignKey(
        "mozimages.MozImage",
        null=True,
        blank=True,
        on_delete=SET_NULL,
        related_name="+",
        verbose_name="Image",
    )

    # Meta
    icon = FileField(upload_to="topics/icons", blank=True, default="")
    color = CharField(max_length=14, choices=COLOR_CHOICES, default="blue-40")
    keywords = ClusterTaggableManager(through=TopicTag, blank=True)

    # Content panels
    content_panels = BasePage.content_panels + [
        FieldPanel("description"),
        StreamFieldPanel("featured"),
        StreamFieldPanel("tabbed_panels"),
        FieldPanel("latest_articles_count"),
        MultiFieldPanel(
            [InlinePanel("people")],
            heading="People",
            help_text=
            "Optional list of people associated with this topic as experts",
        ),
    ]

    # Card panels
    card_panels = [
        FieldPanel("card_title"),
        FieldPanel("card_description"),
        ImageChooserPanel("card_image"),
    ]

    # Meta panels
    meta_panels = [
        MultiFieldPanel(
            [
                InlinePanel("parent_topics", label="Parent topic(s)"),
                InlinePanel("child_topics", label="Child topic(s)"),
            ],
            heading="Parent/child topic(s)",
            classname="collapsible collapsed",
            help_text=("Topics with no parent (i.e. top-level topics) will be "
                       "listed on the home page. Child topics are listed "
                       "on the parent topic’s page."),
        ),
        MultiFieldPanel(
            [FieldPanel("icon"), FieldPanel("color")],
            heading="Theme",
            help_text=(
                "Theme settings used on topic page and any tagged content. "
                "For example, an article tagged with this topic "
                "will use the color specified here as its accent color."),
        ),
        MultiFieldPanel(
            [
                FieldPanel("seo_title"),
                FieldPanel("search_description"),
                ImageChooserPanel("social_image"),
                FieldPanel("keywords"),
            ],
            heading="SEO",
            help_text=("Optional fields to override the default "
                       "title and description for SEO purposes"),
        ),
    ]

    # Settings panels
    settings_panels = [FieldPanel("slug"), FieldPanel("show_in_menus")]

    # Tabs
    edit_handler = TabbedInterface([
        ObjectList(content_panels, heading="Content"),
        ObjectList(card_panels, heading="Card"),
        ObjectList(meta_panels, heading="Meta"),
        ObjectList(settings_panels, heading="Settings", classname="settings"),
    ])

    @property
    def articles(self):
        return get_combined_articles(self, topics__topic__pk=self.pk)

    @property
    def events(self):
        """Return upcoming events for this topic,
        ignoring events in the past, ordered by start date"""
        return get_combined_events(self,
                                   topics__topic__pk=self.pk,
                                   start_date__gte=datetime.datetime.now())

    @property
    def experts(self):
        """Return Person instances for topic experts"""
        return [person.person for person in self.people.all()]

    @property
    def videos(self):
        """Return the latest videos and external videos for this topic. """
        return get_combined_videos(self, topics__topic__pk=self.pk)

    @property
    def color_value(self):
        return dict(COLOR_VALUES)[self.color]

    @property
    def subtopics(self):
        return [topic.child for topic in self.child_topics.all()]
Example No. 30
0
def updatecart(request, order_id):
    print(order_id)
    current_orderproduct = Order_Product.objects.filter(id=order_id).first()
    quantity = request.data.get("quantity")
    product = Product.objects.filter(id=current_orderproduct.product.id).first()
    amount = product.unit_cost
    if not quantity:
        return Response(
            data={
                "Message": "Make Sure All The Fields Are Included"
            },
            status=status.HTTP_400_BAD_REQUEST
        )

    current_orderproduct.quantity = quantity
    current_orderproduct.total = float(amount)*float(quantity)
    current_orderproduct.save()
 
    products = []
    r_token = request.META['HTTP_AUTHORIZATION']
    new_token = r_token.split(' ', 1)[1]
    token = Token.objects.filter(key=new_token).first()
    buyer = Buyer.objects.filter(user_ptr_id=token.user.id).first()
    cart_items = Order_Product.objects.filter(buyer=buyer, checkout__isnull=True)
    for order in cart_items:
        if order.quantity is not None:
            print(order.quantity)
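            # Annotate the single matching product row with this order's id,
            # quantity and total; Sum() over these constant values simply copies
            # them through (the filter matches one row) while setting an
            # output_field for the serializer.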
            product = Product.objects.filter(id=order.product.id).annotate(
                order_id=Sum(order.id, output_field=IntegerField()),
                quantity=Sum(order.quantity, output_field=FloatField()),
                total=Sum(order.total, output_field=FloatField()),
            ).first()
            print(product.quantity)
            products.append(product)
    data = CustomCartSerializer(products, many=True)    
    context = {
        'data': data.data
    }

    return Response(data.data,status=status.HTTP_200_OK)