Esempio n. 1
0
    def test_should_be_accepted_by_django(self):
        """Ensure a ``Choices`` instance works as a Django field's ``choices``."""

        from django.db.models import IntegerField

        field = IntegerField(
            choices=self.MY_CHOICES,
            default=self.MY_CHOICES.ONE,
        )
        self.assertEqual(field.choices, self.MY_CHOICES)

        # Django 1.7+ exposes ``_check_choices``; it must report no errors.
        if django.VERSION >= (1, 7):
            self.assertEqual(field._check_choices(), [])

        # A value inside the choices validates cleanly...
        field.validate(1, None)

        # ...while a value outside of them must raise ``ValidationError``.
        with self.assertRaises(ValidationError) as raise_context:
            field.validate(4, None)

        # The exception ``code`` attribute is only set by Django 1.6+.
        if django.VERSION >= (1, 6):
            self.assertEqual(raise_context.exception.code, 'invalid_choice')
Esempio n. 2
0
        rhs, rhs_params = self.process_rhs(compiler, connection)
        params = lhs_params + rhs_params
        return '%s <> %s' % (lhs, rhs), params


from django.db.models import Transform

class AbsoluteValue(Transform):
    """SQL ``ABS()`` transform, usable as ``field__abs`` in lookups."""

    lookup_name = 'abs'

    def as_sql(self, compiler, connection):
        # Compile the wrapped expression, then wrap its SQL in ABS().
        inner_sql, inner_params = compiler.compile(self.lhs)
        return "ABS(%s)" % inner_sql, inner_params

from django.db.models import IntegerField
# Make the ``abs`` transform available on every IntegerField (``field__abs``).
IntegerField.register_lookup(AbsoluteValue)
#*******************************

from django.db.models import Transform, FloatField, Lookup

class NegativeValue(Transform):
    """Transform that negates a numeric column (registered as ``ne``)."""

    lookup_name = 'ne'

    def as_sql(self, compiler, connection):
        # Compile the wrapped expression; compare with process_lhs.
        inner_sql, inner_params = compiler.compile(self.lhs)
        return '-1 * %s' % inner_sql, inner_params

    @property
    def output_field(self):
        # The negated value is exposed as an integer.
        return IntegerField()

Esempio n. 3
0
 def __init__(self, expression, **extra):
     """Build the expression with an ``IntegerField`` output by default.

     ``output_field`` is popped from ``extra`` so it is not passed to the
     parent constructor twice when the caller supplies one explicitly.
     """
     output_field = extra.pop('output_field', IntegerField())
     super(Length, self).__init__(expression,
                                  output_field=output_field,
                                  **extra)
Esempio n. 4
0
def stats_by_dtxsids(dtxs):
    """Return per-chemical usage statistics for the given DTXSIDs.

    Each row of the returned values-queryset carries:

    pucs_n
        The number of unique PUCs (product categories) the chemical is
        associated with.
    dds_n
        The number of data documents (e.g. MSDS, SDS, ingredient list,
        product label) the chemical appears in.
    dds_wf_n
        The number of data documents with associated weight fraction data
        that the chemical appears in (weight fraction data may be reported
        or predicted data, i.e., predicted from an ingredient list).
    products_n
        The number of products the chemical appears in, where a product is
        defined as a product entry in Factotum.
    """
    # The number of unique PUCs (product categories) the chemical is
    # associated with.
    pucs_n = DSSToxSubstance.objects.filter(sid__in=dtxs).\
        values('sid').annotate(pucs_n=Count('extracted_chemical__extracted_text__data_document__product__puc')).values('sid','pucs_n')

    # The number of data documents (e.g. MSDS, SDS, ingredient list,
    # product label) the chemical appears in.
    dds_n = DSSToxSubstance.objects.filter(sid__in=dtxs).values('sid').\
        annotate(dds_n=Count('extracted_chemical__extracted_text__data_document')).values('sid','dds_n')

    # The number of data documents with associated weight fraction data that
    # the chemical appears in.  NOTE: an equivalent ORM Subquery formulation
    # used to live here but its result was immediately overwritten (dead
    # code), so the stat is computed with raw SQL only.
    dds_wf_n = {}
    strsql = ("SELECT dss.sid , "
                "IFNULL("
                    "SUM( "
                        "(SELECT Count(DISTINCT ec2.extracted_text_id) as dd_wf_id "
                        "FROM dashboard_extractedchemical ec2 "
                        "WHERE ec2.id = dss.extracted_chemical_id "
                        "GROUP BY ec2.extracted_text_id "
                        "HAVING SUM( ( "
                            "(ec2.raw_max_comp IS NULL) +  "
                            "(ec2.raw_min_comp IS NULL) +  "
                            "(ec2.raw_central_comp IS NULL) "
                            ") = 0) > 0 )) "
                            ",0 "
                            ") as dds_wf_n "
                            "FROM dashboard_dsstoxsubstance dss "
                            "LEFT JOIN dashboard_extractedchemical ec "
                            "on ec.id = dss.extracted_chemical_id "
                            "GROUP BY dss.sid ")
    cursor_dds_wf_n = connection.cursor()
    cursor_dds_wf_n.execute(strsql)

    for row in cursor_dds_wf_n:
        # The SQL scans the whole table; keep only the requested SIDs.
        if row[0] in dtxs:
            dds_wf_n[row[0]] = row[1]

    # The number of products the chemical appears in, where a product is
    # defined as a product entry in Factotum.
    products_n = DSSToxSubstance.objects.filter(sid__in=dtxs).values('sid').\
       annotate(products_n=Count('extracted_chemical__extracted_text__data_document__product')).values('sid', 'products_n')

    # Seed placeholder columns on the PUC queryset, then overwrite each row
    # with the real values computed above.
    stats = pucs_n\
    .annotate(dds_n=Value(-1, output_field=IntegerField())) \
    .annotate(dds_wf_n=Value(-1, output_field=IntegerField())) \
    .annotate(products_n=Value(-1, output_field=IntegerField()))

    for row in stats:
        row['dds_n'] = int(dds_n.get(sid=row['sid'])['dds_n'] or 0)
        row['dds_wf_n'] = dds_wf_n[row['sid']]
        row['products_n'] = int(products_n.get(sid=row['sid'])['products_n'] or 0)

    return stats
Esempio n. 5
0
 def get_stat_info(self):
     """Aggregate crawl counters per creator.

     Returns a values-queryset keyed by ``created_by`` with integer sums
     of the ``is_complete`` / ``is_successful`` / ``is_loaded`` flags.
     """
     totals = {
         'completed': Sum('is_complete', output_field=IntegerField()),
         'successful': Sum('is_successful', output_field=IntegerField()),
         'loaded': Sum('is_loaded', output_field=IntegerField()),
     }
     return self.crawler.values('created_by').annotate(**totals)
    def order_by(self, *fields):
        """Order the queryset, additionally supporting EAV attribute terms.

        Django only allows to order querysets by direct fields and
        foreign-key chains. In order to bypass this behaviour and order
        by EAV attributes, it is required to construct a custom order-by
        clause manually using Django's conditional expressions.
        This will be slow, of course.

        Raises ``ObjectDoesNotExist`` for an unknown EAV attribute and
        ``NotSupportedError`` when ordering through an EAV foreign-key
        chain is requested.
        """
        order_clauses = []
        query_clause = self
        config_cls = self.model._eav_config_cls

        for term in [t.split('__') for t in fields]:
            # Continue only for EAV attributes.
            if len(term) == 2 and term[0] == config_cls.eav_attr:
                # Retrieve Attribute over which the ordering is performed.
                try:
                    attr = Attribute.objects.get(slug=term[1])
                except ObjectDoesNotExist:
                    raise ObjectDoesNotExist(
                        'Cannot find EAV attribute "{}"'.format(term[1]))

                field_name = 'value_%s' % attr.datatype

                pks_values = Value.objects.filter(
                    # Retrieve pk-values pairs of the related values
                    # (i.e. values for the specified attribute and
                    # belonging to entities in the queryset).
                    attribute__slug=attr.slug,
                    entity_id__in=self
                ).order_by(
                    # Order values by their value-field of
                    # appropriate attribute data-type.
                    field_name
                ).values_list(
                    # Retrieve only primary-keys of the entities
                    # in the current queryset.
                    'entity_id',
                    field_name)

                # Retrieve ordered values from the pk-value list.
                _, ordered_values = zip(*pks_values)

                # Add explicit ordering and turn
                # list of pairs into look-up table.
                val2ind = dict(zip(ordered_values, count()))

                # Finally, zip ordered pks with their grouped orderings.
                entities_pk = [(pk, val2ind[val]) for pk, val in pks_values]

                # Using ordered primary-keys, construct
                # CASE clause of the form:
                #
                #     CASE
                #         WHEN id = 2 THEN 1
                #         WHEN id = 5 THEN 2
                #         WHEN id = 9 THEN 2
                #         WHEN id = 4 THEN 3
                #     END
                #
                when_clauses = [When(id=pk, then=i) for pk, i in entities_pk]

                order_clause = Case(*when_clauses, output_field=IntegerField())

                clause_name = '__'.join(term)
                # Use when-clause to construct
                # custom order-by clause.
                query_clause = query_clause.annotate(
                    **{clause_name: order_clause})

                order_clauses.append(clause_name)

            elif len(term) >= 2 and term[0] == config_cls.eav_attr:
                raise NotSupportedError(
                    'EAV does not support ordering through '
                    'foreign-key chains')

            else:
                order_clauses.append(term[0])

        return QuerySet.order_by(query_clause, *order_clauses)
Esempio n. 7
0
def user_event_widgets(**kwargs):
    """Build dashboard widget dicts for the events a user can access.

    Expects ``user`` in ``kwargs``.  Each widget contains rendered HTML with
    the event name, date range, times, an order count (only when the user is
    allowed to see it) and a status label derived from the event's state.
    Limited to the 100 most recent events by start date.
    """
    user = kwargs.pop('user')
    widgets = []

    tpl = """
        <a href="{url}" class="event">
            <div class="name">{event}</div>
            <div class="daterange">{daterange}</div>
            <div class="times">{times}</div>
        </a>
        <div class="bottomrow">
            {orders}
            <a href="{url}" class="status-{statusclass}">
                {status}
            </a>
        </div>
    """

    # Per-event count of pending/paid orders, evaluated as a subquery below.
    active_orders = Order.objects.filter(
        event=OuterRef('pk'),
        status__in=[
            Order.STATUS_PENDING, Order.STATUS_PAID
        ]).order_by().values('event').annotate(c=Count('*')).values('c')

    required_actions = RequiredAction.objects.filter(event=OuterRef('pk'),
                                                     done=False)

    # Get set of events where we have the permission to show the # of orders
    events_with_orders = set(
        Event.objects.filter(
            Q(organizer_id__in=user.teams.filter(
                all_events=True, can_view_orders=True).values_list('organizer',
                                                                   flat=True))
            | Q(id__in=user.teams.filter(can_view_orders=True).values_list(
                'limit_events__id', flat=True))).values_list('id', flat=True))

    events = user.get_events_with_any_permission().annotate(
        order_count=Subquery(active_orders, output_field=IntegerField()),
        has_ra=Exists(required_actions)).annotate(
            min_from=Min('subevents__date_from'),
            max_from=Max('subevents__date_from'),
            max_to=Max('subevents__date_to'),
            max_fromto=Greatest(
                Max('subevents__date_to'),
                Max('subevents__date_from'))).annotate(
                    order_from=Coalesce('min_from', 'date_from'),
                    order_to=Coalesce('max_fromto', 'max_to', 'max_from',
                                      'date_to'),
                ).order_by('-order_from', 'name').prefetch_related(
                    '_settings_objects',
                    'organizer___settings_objects').select_related(
                        'organizer')[:100]
    for event in events:
        dr = event.get_date_range_display()
        tz = pytz.timezone(event.settings.timezone)
        # Event series span the range of their sub-events instead.
        if event.has_subevents:
            dr = daterange((event.min_from).astimezone(tz),
                           (event.max_fromto or event.max_to
                            or event.max_from).astimezone(tz))

        if event.has_ra:
            status = ('danger', _('Action required'))
        elif not event.live:
            status = ('warning', _('Shop disabled'))
        elif event.presale_has_ended:
            status = ('default', _('Sale over'))
        elif not event.presale_is_running:
            status = ('default', _('Soon'))
        else:
            status = ('success', _('On sale'))

        widgets.append({
            'content':
            tpl.format(
                event=escape(event.name),
                times=_('Event series') if event.has_subevents else
                (((date_format(event.date_admission.astimezone(tz),
                               'TIME_FORMAT') + ' / ') if event.date_admission
                  and event.date_admission != event.date_from else '') +
                 (date_format(event.date_from.astimezone(tz), 'TIME_FORMAT')
                  if event.date_from else '')),
                url=reverse('control:event.index',
                            kwargs={
                                'event': event.slug,
                                'organizer': event.organizer.slug
                            }),
                orders=(
                    '<a href="{orders_url}" class="orders">{orders_text}</a>'.
                    format(orders_url=reverse('control:event.orders',
                                              kwargs={
                                                  'event': event.slug,
                                                  'organizer':
                                                  event.organizer.slug
                                              }),
                           orders_text=ungettext(
                               '{num} order', '{num} orders', event.order_count
                               or 0).format(num=event.order_count or 0))
                    if user.is_superuser or event.pk in events_with_orders else
                    ''),
                daterange=dr,
                status=status[1],
                statusclass=status[0],
            ),
            'display_size':
            'small',
            'priority':
            100,
            'container_class':
            'widget-container widget-container-event',
        })
        # NOTE(review): the triple-quoted template below is an inert
        # expression statement (dead code) — it has no runtime effect.
        """
            {% if not e.live %}
                <span class="label label-danger">{% trans "Shop disabled" %}</span>
            {% elif e.presale_has_ended %}
                <span class="label label-warning">{% trans "Presale over" %}</span>
            {% elif not e.presale_is_running %}
                <span class="label label-warning">{% trans "Presale not started" %}</span>
            {% else %}
                <span class="label label-success">{% trans "On sale" %}</span>
            {% endif %}
        """
    return widgets
Esempio n. 8
0
def review(request):
    """Handle review submission and display.

    GET renders the empty review form.  POST validates the form; an existing
    review by the same first name for the same industry is updated in place,
    otherwise a new one is created.  Afterwards the average rating per
    industry is rendered.
    """
    if request.method == 'POST':
        form = ReviewForm(request.POST)
        if form.is_valid():
            submitted = form.save(commit=False)
            try:
                # Update the existing review for this reviewer/industry pair.
                existing = Review.objects.get(firstname=submitted.firstname,
                                              industry_id=submitted.industry_id)
                existing.rating = submitted.rating
                existing.pub_date = submitted.pub_date
                existing.user_name = submitted.user_name
                existing.comment = submitted.comment
                existing.save()
            except Review.DoesNotExist:
                # First review from this reviewer for this industry.
                submitted.save()

        industries = Industry.objects.all()
        # Average rating per industry, in the same order as ``industries``.
        ratings = [
            Review.objects.filter(industry_id=industry).aggregate(
                Avg('rating', output_field=IntegerField()))
            for industry in industries
        ]
        return render(request, 'temp/rating.html',
                      {'doc': industries, 'rating': ratings})
    else:
        form = ReviewForm()
        return render(request, 'temp/review.html', {'form': form})
Esempio n. 9
0
class Reference(Model):
    # A numbered reference attached to a wiki page; ``source`` stores the
    # cited source text (up to 255 characters).
    number = IntegerField()
    source = CharField(max_length=255)
    page = ForeignKey('wiki.WikiPage')
Esempio n. 10
0
class BlogIndexPage(Page):
    """Wagtail index page listing its descendant ``BlogPage`` children.

    ``get_context`` supports filtering by tag, category and author plus
    pagination, all driven by GET parameters.
    """

    footer = models.ForeignKey('common.Footer',
                               default=DEFAULT_FOOTER_ID,
                               null=True,
                               blank=True,
                               on_delete=SET_NULL,
                               related_name='+')

    menu_order = IntegerField(
        blank=True,
        default=1,
        help_text=(
            'The order this page should appear in the menu. '
            'The lower the number, the more left the page will appear. '
            'This is required for all pages where "Show in menus" is checked.'
        ))

    promote_panels = Page.promote_panels + [
        FieldPanel('menu_order'),
    ]

    content_panels = Page.content_panels + [
        SnippetChooserPanel('footer'),
    ]

    @property
    def blogs(self):
        """Live descendant blog pages, newest first, with related data prefetched."""
        # Get list of blog pages that are descendants of this page
        blogs = BlogPage.objects.descendant_of(self).live()
        blogs = blogs.order_by('-date').select_related(
            'owner').prefetch_related(
                'tagged_items__tag',
                'categories',
                'categories__category',
            )
        return blogs

    def get_context(self,
                    request,
                    tag=None,
                    category=None,
                    author=None,
                    *args,
                    **kwargs):
        """Build the template context, applying tag/category/author filters
        and pagination.  Filter values fall back to GET parameters when not
        passed explicitly (e.g. when not coming from category_view)."""
        context = super(BlogIndexPage,
                        self).get_context(request, *args, **kwargs)
        blogs = self.blogs

        if tag is None:
            tag = request.GET.get('tag')
        if tag:
            blogs = blogs.filter(tags__slug=tag)
        if category is None:  # Not coming from category_view in views.py
            if request.GET.get('category'):
                category = get_object_or_404(BlogCategory,
                                             slug=request.GET.get('category'))
        if category:
            # ``category`` may arrive as a slug string; resolve it first.
            if not request.GET.get('category'):
                category = get_object_or_404(BlogCategory, slug=category)
            blogs = blogs.filter(categories__category__name=category)
        if author:
            # ``author`` may be a username or a numeric id.
            if isinstance(author, str) and not author.isdigit():
                blogs = blogs.filter(author__username=author)
            else:
                blogs = blogs.filter(author_id=author)

        # Pagination
        page = request.GET.get('page')
        page_size = 10
        if hasattr(settings, 'BLOG_PAGINATION_PER_PAGE'):
            page_size = settings.BLOG_PAGINATION_PER_PAGE

        if page_size is not None:
            paginator = Paginator(blogs, page_size)  # Show 10 blogs per page
            try:
                blogs = paginator.page(page)
            except PageNotAnInteger:
                # Non-numeric page parameter: fall back to the first page.
                blogs = paginator.page(1)
            except EmptyPage:
                # Page out of range: deliver the last page instead.
                blogs = paginator.page(paginator.num_pages)

        context['blogs'] = blogs
        context['category'] = category
        context['tag'] = tag
        context['author'] = author
        context['COMMENTS_APP'] = COMMENTS_APP
        context = get_blog_context(context)

        return context

    class Meta:
        verbose_name = _('Blog index')

    subpage_types = ['BlogPage']
Esempio n. 11
0
class ExamMarks(Model):
    """A student's result for one exam: points, numeric mark, letter grade."""

    # Letter grades, best (A) to worst (FX).
    SYMBOL_MARK = (
        ('A', 'A'),
        ('B', 'B'),
        ('C', 'C'),
        ('D', 'D'),
        ('E', 'E'),
        ('F', 'F'),
        ('FX', 'FX'),
    )

    # Numeric marks with their (Russian) display strings.
    MARKS = ((0, "Неявка"), (1, "Индивидуальный план"),
             (2, "Неудовлетворительно"), (3, "Удовлетворительно"),
             (4, "Хорошо"), (5, "Отлично"), (6, "Зачтено"), (7, "Не зачтено"),
             (8, "Не допущен"), (9, "---"))

    exam = ForeignKey(Exam, db_index=True, on_delete=CASCADE)
    student = ForeignKey(Student, db_index=True, on_delete=CASCADE)
    inPoints = FloatField(verbose_name="баллы за срез", max_length=255)
    additional_points = FloatField(verbose_name="баллы за отработку",
                                   blank=True,
                                   null=True,
                                   max_length=255)
    examPoints = FloatField(verbose_name="баллы за экзамен",
                            blank=True,
                            null=True,
                            max_length=255)
    mark = IntegerField(verbose_name='Оценка',
                        choices=MARKS,
                        db_index=True,
                        default=0)
    mark_symbol = CharField('буквенный эквивалент оценки',
                            max_length=2,
                            default='')

    class Meta:
        verbose_name = 'экзаменационная оценка'
        verbose_name_plural = 'экзаменационные оценки'

    def __str__(self):
        return self.student.FIO + ' - ' + self.exam.course.discipline_detail.discipline.name + ' - ' + self.MARKS[
            self.mark][1]

    @property
    def total_points(self):
        """Sum of all point components, counting nullable ones as 0.

        ``additional_points`` and ``examPoints`` allow NULL; summing raw
        values would raise ``TypeError`` on None, so missing parts count
        as zero.
        """
        return sum(p or 0 for p in (self.inPoints, self.additional_points,
                                    self.examPoints))

    def get_mark_symbol(self):
        """Map ``total_points`` onto a letter grade (defaults to FX)."""
        points = self.total_points
        symbol = ExamMarks.SYMBOL_MARK[6][0]
        if 25 <= points < 55:
            symbol = ExamMarks.SYMBOL_MARK[5][0]
        elif 55 <= points < 65:
            symbol = ExamMarks.SYMBOL_MARK[4][0]
        elif 65 <= points < 75:
            symbol = ExamMarks.SYMBOL_MARK[3][0]
        elif 75 <= points < 85:
            symbol = ExamMarks.SYMBOL_MARK[2][0]
        elif 85 <= points < 95:
            symbol = ExamMarks.SYMBOL_MARK[1][0]
        elif 95 <= points <= 100:
            symbol = ExamMarks.SYMBOL_MARK[0][0]
        return symbol

    def get_control_mark(self):
        """Map ``total_points`` onto a numeric mark based on the control type."""
        points = self.total_points
        mark = 9
        if self.exam.controlType != Control.NONE and self.exam.controlType != Control.CREDIT:
            if 0 <= points < 45 and self.exam.controlType == Control.EXAM:
                mark = 8
            elif 45 <= points < 55:
                mark = 2
            elif 55 <= points < 65:
                mark = 3
            elif 65 <= points < 85:
                mark = 4
            elif 85 <= points <= 100:
                mark = 5
        elif self.exam.controlType == Control.CREDIT:
            mark = 6
            if 0 <= points < 60:
                mark = 7
        return mark

    def save(self, *args, **kwargs):
        """Recompute mark and letter grade from points before persisting."""
        if self.mark > 1 and self.mark != 8:
            if self.exam.controlType == Control.CREDIT:
                # Credits carry no exam points; zero them before recomputing.
                self.examPoints = 0
            self.mark = self.get_control_mark()
            self.mark_symbol = self.get_mark_symbol()
        else:
            self.examPoints = 0
        super().save(*args, **kwargs)

    @property
    def mark_to_text(self):
        """Display string (Russian) for the current numeric mark."""
        return self.MARKS[self.mark][1]
Esempio n. 12
0
class DisciplineDetails(Model):
    """Per-semester workload details (hours and credits) of a discipline."""

    discipline = models.ForeignKey(Discipline,
                                   related_name='detail',
                                   verbose_name="дисциплина",
                                   db_index=True,
                                   on_delete=CASCADE)
    semester = ForeignKey('Semester',
                          verbose_name="семестр",
                          db_index=True,
                          null=True,
                          on_delete=models.CASCADE)
    Credit = IntegerField(verbose_name="ЗЕТ",
                          db_index=True,
                          blank=True,
                          null=True)
    Lecture = IntegerField(verbose_name="количество лекции",
                           db_index=True,
                           blank=True,
                           null=True)
    Practice = IntegerField(verbose_name="количество практики",
                            db_index=True,
                            blank=True,
                            null=True)
    Lab = IntegerField(verbose_name="количество лабораторных работ",
                       db_index=True,
                       blank=True,
                       null=True)
    KSR = IntegerField(
        verbose_name="количество контрольно-самостоятельных работ",
        db_index=True,
        blank=True,
        null=True)
    SRS = IntegerField(verbose_name="количество срс",
                       db_index=True,
                       blank=True,
                       null=True)

    class Meta:
        verbose_name = 'вариант дисциплины'
        verbose_name_plural = 'вариант дисциплины'
        unique_together = (('discipline', 'semester'), )

    def __str__(self):
        return self.discipline.name + ' - ' + self.semester.name + ' семестр'

    @property
    def total_hours(self):
        """Total hours across all work kinds; an exam adds a fixed 36 hours.

        Hour fields are nullable, so missing components count as 0.
        """
        # BUG FIX: a related manager is not iterable itself; query it with
        # ``.all()`` instead of iterating ``self.control_set`` directly.
        exam_hours = 0
        for control in self.control_set.all():
            if control.control_type == Control.EXAM:
                exam_hours = 36
                break
        return ((self.Lecture or 0) + (self.Practice or 0) + (self.Lab or 0)
                + (self.KSR or 0) + (self.SRS or 0) + exam_hours)

    @property
    def controls(self):
        """Comma-separated display names of all attached control forms."""
        res1 = list(self.control_set.all().values_list('control_type',
                                                       flat=True))
        res2 = map(lambda x: Control.CONTROL_FORM[x][1], res1)
        return ', '.join(res2)

    @property
    def controls_list(self):
        """(id, display name) pairs for every attached control."""
        return [(control.id, Control.CONTROL_FORM[control.control_type][1])
                for control in self.control_set.all()]
Esempio n. 13
0
class Specialization(Model):
    """An academic specialization with its qualification and education level."""

    UNDEFINED = 0

    # Qualification constants.
    SPECIALIST = 1
    BACHELOR = 2
    MASTER = 3
    BACHELOR_ACADEMIC = 4
    BACHELOR_APPLIED = 5
    MASTER_ACADEMIC = 6
    MASTER_APPLIED = 7
    POSTGRADUATE = 8
    INTERN = 9

    # Choices for ``qual``; display strings are intentionally in Russian.
    QUALIFICATION = (
        (UNDEFINED, '—'),
        (SPECIALIST, 'специалитет'),
        (BACHELOR, 'бакалавриат'),
        (MASTER, 'магистратура'),
        (BACHELOR_ACADEMIC, 'академический бакалавриат'),
        (BACHELOR_APPLIED, 'прикладной бакалавриат'),
        (MASTER_ACADEMIC, 'академическая магистратура'),
        (MASTER_APPLIED, 'прикладная магистратура'),
        (POSTGRADUATE, 'аспирантура'),
        (INTERN, 'ординатура'),
    )

    # Education-level constants.
    MIDDLE_PROFESSIONAL = 1
    HIGHER_PROFESSIONAL = 2

    # Choices for ``level``.
    EDUCATION_LEVEL = (
        (UNDEFINED, '—'),
        (MIDDLE_PROFESSIONAL, 'среднее профессиональное образование'),
        (HIGHER_PROFESSIONAL, 'высшее профессиональное образование'),
    )

    name = CharField(verbose_name="название специализации",
                     max_length=200,
                     db_index=True)
    brief_name = CharField(verbose_name="короткое имя специализации",
                           max_length=50,
                           db_index=True,
                           blank=True,
                           null=True)
    code = CharField(verbose_name="код специализации",
                     max_length=100,
                     db_index=True,
                     unique=True)
    qual = IntegerField("квалификация",
                        choices=QUALIFICATION,
                        blank=True,
                        default=0)
    level = IntegerField("уровень образования",
                         choices=EDUCATION_LEVEL,
                         blank=True,
                         default=0)

    class Meta:
        verbose_name = 'специализация'
        verbose_name_plural = 'специализации'

    def __str__(self):
        return self.name
Esempio n. 14
0
    def get_queryset(self):
        """Build a filtered, ordered ``TourOperator`` queryset from GET params.

        Filters by parks/countries (OR-ed together), luxury focus, minimum
        average rating, languages, headquarters and "that" rules.  Ordering
        defaults to the per-country average YAS score when parks/countries
        are selected, otherwise to the explicit ``ordering`` parameter.
        """
        params = self.request.GET
        queryset = TourOperator.objects.all()
        # Each multi-value parameter may arrive as ``name[]`` or ``name``.
        parks = params.getlist('parks[]', []) + params.getlist('parks', [])
        countries = params.getlist('countries[]', []) + params.getlist(
            'countries', [])
        luxury_focus = params.get('luxury_focus', None)
        rating = params.get('rating', None)
        languages = params.getlist('languages[]', []) + params.getlist(
            'languages', [])
        headquarters = params.getlist('headquarters[]', []) + params.getlist(
            'headquarters', [])
        that = params.getlist('that[]', []) + params.getlist('that', [])
        ordering = params.get('ordering', '-yas_score')
        # slug: supplied as a JSON-encoded GET param when loaded via the API...
        slug = json.loads(params.get('slug', 'null'))

        # ...or set on the view when the page is rendered server-side.
        if hasattr(self, 'slug') and self.slug:
            slug = self.slug

        if slug:
            if slug['model'] == 'CountryIndex':
                countries = [slug['id']] + countries
            if slug['model'] == 'Park':
                parks = [slug['id']] + parks

        if parks or countries:
            queryset = queryset.filter(
                Q(country_indexes__in=countries) | Q(parks__in=parks))

        if luxury_focus:
            queryset = queryset.filter(luxury_level=luxury_focus)

        if rating:
            queryset = queryset.filter(average_rating__gte=rating)

        if languages:
            queryset = queryset.filter(languages__in=languages)

        if headquarters:
            queryset = queryset.filter(headquarters__in=headquarters)

        if that:
            # "that" rules may carry an annotation plus a filter expression.
            that_choices = TourOperator.operator_that_choices()
            for t in that:
                if t in that_choices:
                    rule = that_choices[t]
                    if rule['annotate']:
                        queryset = queryset.annotate(
                            subquery_alias=rule['annotate'])
                    queryset = queryset.filter(rule['query'])

        queryset = queryset.distinct()
        queryset = queryset.annotate(new_yas_score=F('yas_score'))
        if ordering and ordering == '-yas_score':
            # Default ordering: average YAS score restricted to the selected
            # countries (selected parks are mapped to their countries first).
            if parks or countries:
                parks_countries = Park.objects.filter(
                    id__in=parks).values_list('country_indexes__id', flat=True)
                all_ids = countries + list(parks_countries)
                yas_score_subquery = YASScore.objects \
                    .filter(tour_operator_id=OuterRef('id')) \
                    .filter(country_index_id__in=all_ids) \
                    .order_by().values('tour_operator_id')
                packages_yas_score_avg = yas_score_subquery \
                    .annotate(avg=Avg('yas_score')) \
                    .values('avg')
                queryset = queryset.annotate(
                    new_yas_score=Subquery(packages_yas_score_avg, output_field=IntegerField())) \
                    .order_by('-new_yas_score')
        else:
            queryset = queryset.order_by(ordering)
        # Stash the parsed filters on the queryset for use by the caller.
        queryset.parks = parks
        queryset.countries = countries
        return queryset
Esempio n. 15
0
    def get_queryset(self):
        """Build the filtered, ordered ``Itinerary`` queryset for this view.

        Reads filter parameters from the request's query string (both the
        ``name[]`` and ``name`` spellings are accepted for list params),
        optionally seeded by a ``slug`` (from the API payload or set on the
        view when rendering server-side). Attaches the parsed filter values
        onto the returned queryset for later use by the template.
        """
        params = self.request.GET
        # Soft-deleted itineraries are always excluded.
        queryset = Itinerary.objects.filter(date_deleted__isnull=True)
        parks = params.getlist('parks[]', []) + params.getlist('parks', [])
        countries = params.getlist('countries[]', []) + params.getlist(
            'countries', [])
        days = params.get('days', None)
        min_price = params.get('min_price', None)
        max_price = params.get('max_price', None)
        from_date = params.get('from_date', None)
        to_date = params.get('to_date', None)
        travelers = params.get('adult_travelers', None)
        main_focus = params.get('main_focus', None)
        secondary_focus = params.getlist(
            'secondary_focus[]', []) + params.getlist('secondary_focus', [])
        safary_preference = params.get('safary_preference', None)
        activity_levels = params.getlist(
            'activity_levels[]', []) + params.getlist('activity_levels', [])
        ordering = params.get('ordering', '-id')
        operators = []
        # slug, if loaded via the API ('null' default keeps json.loads happy)
        slug = json.loads(params.get('slug', 'null'))

        # slug, if the page is rendered from the server (set on the view)
        if hasattr(self, 'slug') and self.slug:
            slug = self.slug

        # A slug pre-seeds the corresponding filter list.
        if slug:
            if slug['model'] == 'CountryIndex':
                countries = [slug['id']] + countries
            if slug['model'] == 'Park': parks = [slug['id']] + parks
            if slug['model'] == 'TourOperator': operators.append(slug['id'])

        if len(parks) or len(countries):
            queryset = queryset.filter(
                Q(country_indexes__in=countries) | Q(parks__in=parks))

        if (len(operators)):
            queryset = queryset.filter(tour_operator__in=operators)

        if days:
            queryset = queryset.filter(days=days)

        price_query = Q()
        if min_price: price_query &= Q(search_price__gte=min_price)
        if max_price: price_query &= Q(search_price__lte=max_price)
        if (min_price or max_price):
            queryset = queryset.filter(price_query)

        # Logic of filter: get all months between dates.
        # Check that package contain all this months.
        # Used "Spanning multi-valued relationships": https://docs.djangoproject.com/en/dev/topics/db/queries/
        months = []
        if from_date: from_date = datetime.strptime(from_date, '%m/%d/%Y')
        if to_date: to_date = datetime.strptime(to_date, '%m/%d/%Y')
        if from_date and to_date:
            months = self.months_between(from_date, to_date)
        elif from_date:
            months.append(from_date.month)
        elif to_date:
            months.append(to_date.month)
        if len(months):
            # Chained .filter() calls require the itinerary to match EVERY month.
            for month in months:
                queryset = queryset.filter(months=month)

        # TODO: Hmm..
        # if travelers:
        #     queryset.filter()

        if len(secondary_focus):
            queryset = queryset.filter(
                secondary_focus_activity__in=secondary_focus)

        if safary_preference:
            queryset = queryset.filter(itinerary_type_id=safary_preference)

        if main_focus:
            queryset = queryset.filter(safari_focus_activity_id=main_focus)

        if len(activity_levels):
            ACTIVITIES = {
                'Easy': 'Easy',
                'Moderate': 'Moderate',
                'Strenuous': 'Strenuous',
            }
            activity_levels_query = Q()

            # 'Easy' also matches itineraries with no activity level set.
            if 'Easy' in activity_levels:
                activity_levels_query |= Q((Q(
                    activity_level_name=ACTIVITIES['Easy']))
                                           | (Q(activity_level_name='')))
            if 'Moderate' in activity_levels:
                activity_levels_query |= Q(
                    activity_level_name=ACTIVITIES['Moderate'])
            if 'Strenuous' in activity_levels:
                activity_levels_query |= Q(
                    activity_level_name=ACTIVITIES['Strenuous'])

            queryset = queryset.filter(activity_levels_query)

        # Default yas_score comes from the tour operator; it may be replaced
        # below by a per-country average.
        queryset = queryset.annotate(yas_score=F('tour_operator__yas_score'))
        if ordering:
            #yas score
            if ordering == '-yas_score':
                #yas score by country
                if len(parks) or len(countries):
                    # Selected parks contribute their countries to the average.
                    parks_countries = Park.objects.filter(
                        id__in=parks).values_list('country_indexes__id',
                                                  flat=True)
                    all_ids = countries + list(parks_countries)
                    # Correlated subquery: average YAS score of the itinerary's
                    # operator restricted to the selected countries.
                    yas_score_subquery = YASScore.objects \
                        .filter(tour_operator_id=OuterRef('tour_operator_id')) \
                        .filter(country_index_id__in=all_ids) \
                        .order_by().values('tour_operator_id')
                    packages_yas_score_avg = yas_score_subquery \
                        .annotate(avg=Avg('yas_score')) \
                        .values('avg')
                    queryset = queryset.annotate(
                        yas_score=Subquery(packages_yas_score_avg, output_field=IntegerField())) \
                        .order_by('-yas_score')
                else:
                    #when there's no country selected
                    #nothing to do
                    pass
            queryset = queryset.order_by(ordering)
        else:
            queryset = queryset.order_by(
                F('safari_focus_activity__priority').desc(nulls_last=True))
        queryset = queryset.distinct()

        # For page title and description in ItineraryView.page_head
        queryset.parks = parks
        queryset.countries = countries
        queryset.main_focus = main_focus
        queryset.secondary_focus = secondary_focus

        return queryset
Esempio n. 16
0
def run(*args):
    """Copy the JSON ``info['rating']`` value into ``Account.rating``.

    Only rows whose stored rating differs from the JSON value are touched;
    the number of updated rows is printed.
    """
    out_of_sync = (
        Account.objects
        .filter(info__rating__isnull=False)
        .annotate(info_rating=Cast(JSONF('info__rating'), IntegerField()))
        .exclude(rating=F('info_rating'))
    )
    updated_rows = out_of_sync.update(rating=F('info_rating'))
    print(updated_rows)
Esempio n. 17
0
def product_type_counts(request):
    """Render bi-weekly finding metrics for one product type.

    Expects ``month``, ``year`` and ``product_type`` GET parameters
    (validated through :class:`ProductTypeCountsForm`). Computes, for the
    requested month: findings opened in the period (plus a 12-month trend),
    findings closed in the period by severity, currently-open findings by
    severity, and the top ten products by open finding severity.

    :param request: the current ``HttpRequest``
    :return: rendered ``dojo/pt_counts.html`` response
    """
    form = ProductTypeCountsForm()
    opened_in_period_list = []
    oip = None                # opened-in-period counts
    cip = None                # closed-in-period severity breakdown
    aip = None                # all (overall) open severity breakdown
    all_current_in_pt = None
    top_ten = None
    pt = None
    today = timezone.now()
    first_of_month = today.replace(day=1,
                                   hour=0,
                                   minute=0,
                                   second=0,
                                   microsecond=0)
    mid_month = first_of_month.replace(day=15,
                                       hour=23,
                                       minute=59,
                                       second=59,
                                       microsecond=999999)
    end_of_month = mid_month.replace(day=monthrange(today.year,
                                                    today.month)[1],
                                     hour=23,
                                     minute=59,
                                     second=59,
                                     microsecond=999999)
    start_date = first_of_month
    end_date = end_of_month

    if request.method == 'GET' and 'month' in request.GET and 'year' in request.GET and 'product_type' in request.GET:
        form = ProductTypeCountsForm(request.GET)
        if form.is_valid():
            pt = form.cleaned_data['product_type']
            user_has_permission_or_403(request.user, pt,
                                       Permissions.Product_Type_View)
            month = int(form.cleaned_data['month'])
            year = int(form.cleaned_data['year'])
            first_of_month = first_of_month.replace(month=month, year=year)

            month_requested = datetime(year, month, 1)

            end_of_month = month_requested.replace(day=monthrange(
                month_requested.year, month_requested.month)[1],
                                                   hour=23,
                                                   minute=59,
                                                   second=59,
                                                   microsecond=999999)
            start_date = first_of_month
            start_date = datetime(start_date.year,
                                  start_date.month,
                                  start_date.day,
                                  tzinfo=timezone.get_current_timezone())
            end_date = end_of_month
            end_date = datetime(end_date.year,
                                end_date.month,
                                end_date.day,
                                tzinfo=timezone.get_current_timezone())

            oip = opened_in_period(start_date, end_date, pt)

            # trending data - 12 months
            for x in range(12, 0, -1):
                opened_in_period_list.append(
                    opened_in_period(start_date + relativedelta(months=-x),
                                     end_of_month + relativedelta(months=-x),
                                     pt))

            opened_in_period_list.append(oip)

            closed_in_period = Finding.objects.filter(
                mitigated__date__range=[start_date, end_date],
                test__engagement__product__prod_type=pt,
                severity__in=('Critical', 'High', 'Medium',
                              'Low')).values('numerical_severity').annotate(
                                  Count('numerical_severity')).order_by(
                                      'numerical_severity')

            total_closed_in_period = Finding.objects.filter(
                mitigated__date__range=[start_date, end_date],
                test__engagement__product__prod_type=pt,
                severity__in=('Critical', 'High', 'Medium',
                              'Low')).aggregate(total=Sum(
                                  Case(When(severity__in=('Critical', 'High',
                                                          'Medium', 'Low'),
                                            then=Value(1)),
                                       output_field=IntegerField())))['total']

            # NOTE: uses date__lte (not date__lt) so this severity breakdown
            # covers the same findings as total_overall_in_pt and
            # all_current_in_pt below; previously a date__lt here could make
            # the per-severity rows disagree with their own 'Total'.
            overall_in_pt = Finding.objects.filter(
                date__lte=end_date,
                verified=True,
                false_p=False,
                duplicate=False,
                out_of_scope=False,
                mitigated__isnull=True,
                test__engagement__product__prod_type=pt,
                severity__in=('Critical', 'High', 'Medium',
                              'Low')).values('numerical_severity').annotate(
                                  Count('numerical_severity')).order_by(
                                      'numerical_severity')

            total_overall_in_pt = Finding.objects.filter(
                date__lte=end_date,
                verified=True,
                false_p=False,
                duplicate=False,
                out_of_scope=False,
                mitigated__isnull=True,
                test__engagement__product__prod_type=pt,
                severity__in=('Critical', 'High', 'Medium',
                              'Low')).aggregate(total=Sum(
                                  Case(When(severity__in=('Critical', 'High',
                                                          'Medium', 'Low'),
                                            then=Value(1)),
                                       output_field=IntegerField())))['total']

            all_current_in_pt = Finding.objects.filter(
                date__lte=end_date,
                verified=True,
                false_p=False,
                duplicate=False,
                out_of_scope=False,
                mitigated__isnull=True,
                test__engagement__product__prod_type=pt,
                severity__in=('Critical', 'High', 'Medium',
                              'Low')).prefetch_related(
                                  'test__engagement__product',
                                  'test__engagement__product__prod_type',
                                  'test__engagement__risk_acceptance',
                                  'reporter').order_by('numerical_severity')

            top_ten = Product.objects.filter(
                engagement__test__finding__date__lte=end_date,
                engagement__test__finding__verified=True,
                engagement__test__finding__false_p=False,
                engagement__test__finding__duplicate=False,
                engagement__test__finding__out_of_scope=False,
                engagement__test__finding__mitigated__isnull=True,
                engagement__test__finding__severity__in=('Critical', 'High',
                                                         'Medium', 'Low'),
                prod_type=pt)
            top_ten = severity_count(
                top_ten, 'annotate',
                'engagement__test__finding__severity').order_by(
                    '-critical', '-high', '-medium', '-low')[:10]

            # S0..S3 buckets keyed by numerical severity; filled below.
            cip = {
                'S0': 0,
                'S1': 0,
                'S2': 0,
                'S3': 0,
                'Total': total_closed_in_period
            }

            aip = {
                'S0': 0,
                'S1': 0,
                'S2': 0,
                'S3': 0,
                'Total': total_overall_in_pt
            }

            for o in closed_in_period:
                cip[o['numerical_severity']] = o['numerical_severity__count']

            for o in overall_in_pt:
                aip[o['numerical_severity']] = o['numerical_severity__count']
        else:
            messages.add_message(
                request,
                messages.ERROR,
                "Please choose month and year and the Product Type.",
                extra_tags='alert-danger')

    add_breadcrumb(title="Bi-Weekly Metrics", top_level=True, request=request)

    return render(
        request, 'dojo/pt_counts.html', {
            'form': form,
            'start_date': start_date,
            'end_date': end_date,
            'opened_in_period': oip,
            'trending_opened': opened_in_period_list,
            'closed_in_period': cip,
            'overall_in_pt': aip,
            'all_current_in_pt': all_current_in_pt,
            'top_ten': top_ten,
            'pt': pt
        })
Esempio n. 18
0
 def test_filter_annotation(self):
     """A constant annotation can be used as a filter target."""
     annotated = Book.objects.annotate(
         is_book=Value(1, output_field=IntegerField()))
     for item in annotated.filter(is_book=1):
         self.assertEqual(item.is_book, 1)
Esempio n. 19
0
class RequestSkill(models.Model):
    """Persisted record of a single HTTP request: timings, bodies, headers
    and (optionally) a cProfile dump of the handling code."""

    id = CharField(max_length=36, default=uuid4, primary_key=True)
    path = CharField(max_length=190, db_index=True)
    query_params = TextField(blank=True, default='')
    raw_body = TextField(blank=True, default='')
    body = TextField(blank=True, default='')
    method = CharField(max_length=10)
    start_time = DateTimeField(default=timezone.now, db_index=True)
    view_name = CharField(max_length=190,
                          db_index=True,
                          blank=True,
                          default='',
                          null=True)
    end_time = DateTimeField(null=True, blank=True)
    time_taken = FloatField(blank=True, null=True)
    encoded_headers = TextField(blank=True, default='')  # stores json
    meta_time = FloatField(null=True, blank=True)
    meta_num_queries = IntegerField(null=True, blank=True)
    meta_time_spent_queries = FloatField(null=True, blank=True)
    pyprofile = TextField(blank=True, default='')
    prof_file = FileField(max_length=300, blank=True, storage=silk_storage)

    def _shorten(self, string):
        """Shorten ``string`` without losing start and end context.

        Used to ensure path and view_name don't exceed 190 characters
        (94 + '...' + 93 == 190).
        """
        return '%s...%s' % (string[:94], string[len(string) - 93:])

    @property
    def total_meta_time(self):
        """Sum of instrumentation time and query time (missing values -> 0)."""
        return (self.meta_time or 0) + (self.meta_time_spent_queries or 0)

    @property
    def profile_table(self):
        """Yield parsed profile rows, turning ``file.py:NN`` locations into
        source-browser links. Non-source rows pass through unchanged."""
        for n, columns in enumerate(parse_profile(self.pyprofile)):
            location = columns[-1]
            if n and '{' not in location and '<' not in location:
                # Raw string: '\.' / '\:' in a plain literal are invalid
                # escapes (SyntaxWarning on Python 3.12+); the pattern itself
                # is unchanged.
                r = re.compile(r'(?P<src>.*\.py)\:(?P<num>[0-9]+).*')
                m = r.search(location)
                group = m.groupdict()
                # src/num/name are consumed via locals() in fmt.format below.
                src = group['src']
                num = group['num']
                name = 'c%d' % n
                fmt = '<a name={name} href="?pos={n}&file_path={src}&line_num={num}#{name}">{location}</a>'
                rep = fmt.format(**dict(group, **locals()))
                yield columns[:-1] + [mark_safe(rep)]
            else:
                yield columns

    # defined in atomic transaction within SQLQuery save()/delete() as well
    # as in bulk_create of SQLQueryManager
    # TODO: This is probably a bad way to do this, .count() will prob do?
    num_sql_queries = IntegerField(default=0)  # TODO replace with count()

    @property
    def time_spent_on_sql_queries(self):
        """
        TODO: Perhaps there is a nicer way to do this with Django aggregates?
        My initial thought was to perform:
        SQLQuery.objects.filter.aggregate(Sum(F('end_time')) - Sum(F('start_time')))
        However this feature isnt available yet, however there has been talk
        for use of F objects within aggregates for four years
        here: https://code.djangoproject.com/ticket/14030. It looks
        like this will go in soon at which point this should be changed.
        """
        return sum(x.time_taken for x in SQLQuery.objects.filter(request=self))

    @property
    def headers(self):
        """Request headers decoded from ``encoded_headers`` JSON, wrapped in a
        case-insensitive dictionary."""
        if self.encoded_headers:
            raw = json.loads(self.encoded_headers)
        else:
            raw = {}

        return CaseInsensitiveDictionary(raw)

    @property
    def content_type(self):
        """The request's Content-Type header, or None if absent."""
        return self.headers.get('content-type', None)

    @classmethod
    def garbage_collect(cls, force=False):
        """ Remove Request/Responses when we are at the SILKY_MAX_RECORDED_REQUESTS limit
        Note that multiple in-flight requests may call this at once causing a
        double collection """
        check_percent = SilkyConfig().SILKY_MAX_RECORDED_REQUESTS_CHECK_PERCENT
        check_percent /= 100.0
        # Probabilistic: only run on roughly check_percent of calls,
        # unless forced.
        if check_percent < random.random() and not force:
            return
        target_count = SilkyConfig().SILKY_MAX_RECORDED_REQUESTS

        # Since garbage collection is probabilistic, the target count should
        # be lowered to account for requests before the next garbage collection
        if check_percent != 0:
            target_count -= int(1 / check_percent)

        # Make sure we can delete everything if needed by settings
        if target_count <= 0:
            cls.objects.all().delete()
            return

        try:
            time_cutoff = cls.objects.order_by('-start_time').values_list(
                'start_time', flat=True)[target_count]
        except IndexError:
            # Fewer rows than the target count: nothing to collect.
            return

        cls.objects.filter(start_time__lte=time_cutoff).delete()

    def save(self, *args, **kwargs):
        """Normalize bodies, derive ``time_taken``, clamp over-long fields,
        then save and opportunistically garbage-collect old requests."""
        # sometimes django requests return the body as 'None'
        if self.raw_body is None:
            self.raw_body = ''

        if self.body is None:
            self.body = ''

        if self.end_time and self.start_time:
            interval = self.end_time - self.start_time
            self.time_taken = interval.total_seconds() * 1000

        # We can't save if either path or view_name exceed 190 characters
        if self.path and len(self.path) > 190:
            self.path = self._shorten(self.path)

        if self.view_name and len(self.view_name) > 190:
            self.view_name = self._shorten(self.view_name)

        super(RequestSkill, self).save(*args, **kwargs)
        RequestSkill.garbage_collect(force=False)
Esempio n. 20
0
 def test_null_annotation(self):
     """Annotating ``None`` onto a model round-trips unchanged."""
     annotated = Book.objects.annotate(
         no_value=Value(None, output_field=IntegerField()))
     self.assertIsNone(annotated.first().no_value)
Esempio n. 21
0
def update_vote_related_counters(sender, instance, **kwargs):
    """Signal handler: refresh the denormalized counters kept on the object a
    Vote points at (Comment karma, Choice/Poll vote counts, Article rating)."""
    article_ct = ContentType.objects.get_for_model(Article)
    comment_ct = ContentType.objects.get_for_model(Comment)
    choice_ct = ContentType.objects.get_for_model(Choice)

    def _as_int(values_qs):
        # Wrap an aggregate subquery so that a missing row collapses to 0.
        return Coalesce(Subquery(values_qs, output_field=IntegerField()),
                        Value(0),
                        output_field=IntegerField())

    def _votes_for(content_type):
        # Votes correlated to the outer row's id for the given content type.
        return Vote.objects.filter(
            object_id=OuterRef('id'),
            content_type=content_type).order_by().values('object_id')

    if instance.content_type_id == comment_ct.id:
        karma = _as_int(
            _votes_for(comment_ct).annotate(sum=Sum('score')).values('sum'))
        Comment.objects.filter(id=instance.object_id).update(karma=karma)

    elif instance.content_type_id == choice_ct.id:
        vote_count = _as_int(
            _votes_for(choice_ct).annotate(count=Count('id')).values('count'))
        Choice.objects.filter(id=instance.object_id).update(
            vote_count=vote_count)

        # Roll the per-choice counts up to the owning poll.
        sum_votes = _as_int(
            Choice.objects.filter(poll=OuterRef('id')).order_by().values(
                'poll').annotate(sum=Sum('vote_count')).values('sum'))
        Poll.objects.filter(choice__id=instance.object_id).update(
            sum_votes=sum_votes)

    elif instance.content_type_id == article_ct.id:
        # TODO: bad part of code, can be made more optimally through the RawSQL, but I wanted to use ORM
        rating = _as_int(
            _votes_for(article_ct).annotate(avg=Avg('score')).values('avg'))
        vote_count = _as_int(
            _votes_for(article_ct).annotate(count=Count('id')).values('count'))
        Article.objects.filter(id=instance.object_id).update(
            rating=rating, vote_count=vote_count)
Esempio n. 22
0
    def handle(self, *args, **options):
        """Annotate every OrderItem with the sale tier its order falls into
        and the resulting discount, computed entirely in SQL via Case/When,
        then print the ordered result."""
        # test_products = Product.objects.filter(Q(category__name="офис") | Q(category__name="модерн")).select_related()
        # print(len(test_products))
        # print(test_products.count())
        # print(test_products)
        # db_profile_by_type("learn db", "", connection.queries)

        # Sale tier identifiers
        ACTION_1 = 1
        ACTION_2 = 2
        ACTION_EXPIRED = 3

        # How long after order creation each tier applies
        action_1__time_delta = timedelta(hours=12)
        action_2__time_delta = timedelta(days=1)

        # Discount fraction per tier
        action_1__discount = 0.3
        action_2__discount = 0.15
        action_expired__discount = 0.05

        # Tier membership conditions as Q objects.
        # F expressions keep all the arithmetic inside the SQL query.
        action_1__condition = Q(order__updated__lte=F("order__created") + action_1__time_delta)
        action_2__condition = Q(order__updated__gt=F("order__created") + action_1__time_delta) & Q(
            order__updated__lte=F("order__created") + action_2__time_delta
        )
        action_expired__condition = Q(order__updated__gt=F("order__created") + action_2__time_delta)

        # Map each condition to its tier number: when the Q matches, the Case
        # below yields that tier.
        action_1__order = When(action_1__condition, then=ACTION_1)
        action_2__order = When(action_2__condition, then=ACTION_2)
        action_expired__order = When(action_expired__condition, then=ACTION_EXPIRED)

        # Map each condition to its discount amount.
        # Note the '-' on the second tier: it is deliberate, so that sorting
        # by total_discount puts those rows in decreasing order.
        # F expressions keep the multiplication inside the SQL query.
        action_1__price = When(action_1__condition, then=F("product__price") * F("quantity") * action_1__discount)
        action_2__price = When(action_2__condition, then=F("product__price") * F("quantity") * -action_2__discount)
        action_expired__price = When(
            action_expired__condition, then=F("product__price") * F("quantity") * action_expired__discount
        )

        # First: annotate every OrderItem with a virtual field holding its tier.
        # Second: annotate every OrderItem with a virtual field holding its discount.
        test_orders = (
            OrderItem.objects.annotate(
                action_order=Case(
                    action_1__order,
                    action_2__order,
                    action_expired__order,
                    output_field=IntegerField(),
                )
            )
            .annotate(
                total_discount=Case(
                    action_1__price,
                    action_2__price,
                    action_expired__price,
                    output_field=DecimalField(),
                )
            )
            .order_by("action_order", "total_discount")
            .select_related()
        )

        for orderitem in test_orders:
            print(
                f"{orderitem.action_order:2}: заказ №{orderitem.pk:3}: {orderitem.product.name:15}: скидка {abs(orderitem.total_discount):9.2f} руб. | {orderitem.order.updated - orderitem.order.created}"
            )
Esempio n. 23
0
class Session(Model):
    """A single recorded biofeedback session for a patient, backed by a CSV
    file of hand/frequency/phase traces."""
    patient = ForeignKey(Patient, on_delete=CASCADE)
    number = IntegerField(default=0)  # ordinal of the session for the patient
    datetime = DateTimeField('date of session')
    csv = FileField('full path to csv', max_length=200)
    # Comma-separated band thresholds and their labels (labels has one more
    # entry than bands, covering the open-ended ranges).
    frequency_bands = CharField('frequency bands',
                                max_length=200,
                                default='0.025, 0.48, 1.22, 1.52')
    frequency_labels = CharField('frequency labels',
                                 max_length=200,
                                 default='Off, EMs, Alpha, Beta_1, Beta_2')
    phase_bands = CharField('phase_bands',
                            max_length=200,
                            default='0.2, 0.6, 1.2, 1.7')
    phase_labels = CharField('phase labels',
                             max_length=200,
                             default='Off, Async, Alt, Sync, EMs')
    objects = SessionManager()
    # Per-instance caches, populated lazily by df() and the magnitude methods.
    _df = None
    _divergence_magnitudes = None
    _convergence_magnitudes = None

    def __str__(self) -> str:
        """Human-readable label, e.g. ``Session  3``."""
        return f'Session  {self.number}'

    def convergence_magnitudes_split_total(self) -> List[float]:
        """Total convergence magnitude per hand.

        :return: 2-list ``[left_total, right_total]``
        """
        left_total = 0
        right_total = 0
        for left_mag, right_mag in self.convergence_magnitudes_split():
            left_total += left_mag
            right_total += right_mag
        return [left_total, right_total]

    def divergence_magnitudes_split_total(self) -> List[float]:
        """Total divergence magnitude per hand.

        :return: 2-list ``[left_total, right_total]``
        """
        left_total = 0
        right_total = 0
        for left_mag, right_mag in self.divergence_magnitudes_split():
            left_total += left_mag
            right_total += right_mag
        return [left_total, right_total]

    @round_by
    def convergence_magnitudes_total(self) -> float:
        """Combined (left + right hand) absolute convergence magnitude.

        :return: float
        """
        left, right = self.convergence_magnitudes_split_total()
        return abs(left) + abs(right)

    @round_by
    def divergence_magnitudes_total(self) -> float:
        """Combined (left + right hand) absolute divergence magnitude.

        :return: float
        """
        left, right = self.divergence_magnitudes_split_total()
        return abs(left) + abs(right)

    def measure_dictionary(self) -> dict:
        """
        Calls every method with an attribute 'measure' == True and stores result in a dictionary.
        These methods need to be without any arguments (other than self).
        :return: dict
        """
        measures = dict()
        method_list = []
        for func_str in dir(self):
            if not func_str == 'objects' and not func_str.startswith("__") and not \
                    func_str.startswith('measure_dictionary') and callable(getattr(self, func_str)):
                method_list.append(func_str)
        for func_str in method_list:
            func = getattr(self, func_str)
            try:
                result = func()
                # Debug trace; a context manager guarantees the handle is
                # closed (the old open(...) calls leaked file handles).
                with open('dump.txt', 'w') as dump:
                    print(func, file=dump)
            except Exception:
                # Methods needing extra arguments (or otherwise failing) are
                # skipped; Exception (not bare except) so KeyboardInterrupt
                # and SystemExit still propagate.
                continue
            if hasattr(func, 'measure'):
                if func.measure:
                    with open('dump.txt', 'w') as dump:
                        print(func_str, file=dump)
                    measures[func_str] = result
        return measures

    def df(self, trim_start_by=2):
        """Lazily load the session CSV as a DataFrame, dropping the first
        ``trim_start_by`` rows and resetting the index."""
        if self._df is None:
            csv_path = os.path.join(os.getcwd(), self.csv.name)
            raw = pd.read_csv(csv_path)
            self._df = raw[trim_start_by:].reset_index()
        return self._df

    @clean_df
    @measure
    @additive
    def duration(self) -> int:
        """Number of samples (rows) recorded in the session."""
        time_series = self.df()['Time']
        return len(time_series)

    @clean_df
    def right_hand_data(self) -> pd.Series:
        """Right-hand trace column of the session data."""
        frame = self.df()
        return frame['Right']

    @clean_df
    def left_hand_data(self) -> pd.Series:
        """Left-hand trace column of the session data."""
        frame = self.df()
        return frame['Left']

    @clean_df
    def frequency_data(self) -> pd.Series:
        """Frequency trace column of the session data."""
        frame = self.df()
        return frame['Frequency']

    @clean_df
    def phase_data(self) -> pd.Series:
        """Phase trace column of the session data."""
        frame = self.df()
        return frame['Phase']

    @clean_df
    @measure
    @additive
    def convergence_magnitudes(self) -> List[float]:
        """
        Returns list size of df length with 0 for every non-convergent value
        and with the magnitude of the convergence for every convergent value.
        Result is cached on the instance.
        :return: list[float]
        """
        if self._convergence_magnitudes is not None:
            return self._convergence_magnitudes
        left_hand_data = self.left_hand_data()
        right_hand_data = self.right_hand_data()
        magnitudes = [0] * len(right_hand_data)
        # Start at 1: convergence is defined relative to the previous sample.
        for i in range(1, len(right_hand_data)):
            # Bind top/bot by reference; neither Series is mutated, so the
            # per-iteration .copy() of the old code (O(n) each, O(n^2) total)
            # was unnecessary.
            top, bot = left_hand_data, right_hand_data
            if top[i] < bot[i]:  # then swap them round
                top, bot = bot, top
            if top[i] < top[i - 1] and bot[i] > bot[i - 1]:
                # Top decreasing while bottom increasing: converging.
                magnitudes[i] = top[i - 1] - top[i] + bot[i] - bot[i - 1]
        self._convergence_magnitudes = magnitudes
        return magnitudes

    @clean_df
    @measure
    @additive
    def divergence_magnitudes(self) -> List[float]:
        """
        Returns List size of df length with 0. for every non-divergent value
        and with the magnitude of the divergence for every divergent value.
        Result is cached on the instance.
        :return: list[float]
        """
        if self._divergence_magnitudes is not None:
            return self._divergence_magnitudes
        left_hand_data = self.left_hand_data()
        right_hand_data = self.right_hand_data()
        magnitudes = [0] * len(right_hand_data)
        # Start at 1: divergence is defined relative to the previous sample.
        for i in range(1, len(right_hand_data)):
            # Bind top/bot by reference; neither Series is mutated, so the
            # per-iteration .copy() of the old code (O(n) each, O(n^2) total)
            # was unnecessary.
            top, bot = left_hand_data, right_hand_data
            if top[i] < bot[i]:  # then swap them round
                top, bot = bot, top
            if top[i] > top[i - 1] and bot[i] < bot[i - 1]:
                # Top increasing while bottom decreasing: diverging.
                magnitudes[i] = top[i] - top[i - 1] + bot[i - 1] - bot[i]
        self._divergence_magnitudes = magnitudes
        return magnitudes

    # NOTE(review): decorator order differs from divergence_magnitudes()
    # (@clean_df last instead of first) -- confirm this is intentional.
    @measure
    @additive
    @clean_df
    def divergence_magnitudes_split(self) -> List[tuple]:
        """
        Return a list the length of the data with (0, 0) for every
        non-divergent sample and the (left, right) divergence magnitudes for
        divergent samples.  If the left hand is on top during a divergence,
        the first element is positive and the second negative.
        :return: list(tuple(float, float))
        """
        left_hand_data = self.left_hand_data()
        right_hand_data = self.right_hand_data()
        magnitudes = [(0, 0)] * len(right_hand_data)
        for i in range(1, len(right_hand_data)):
            # Fix: the old code copied both full series on every iteration
            # (O(n^2) work); plain references suffice as nothing mutates.
            top, bot = left_hand_data, right_hand_data
            left_top = True
            if top[i] < bot[i]:  # right hand is on top at this sample
                left_top = False
                top, bot = bot, top
            # Diverging: top increasing while bot decreasing.
            if top[i] > top[i - 1] and bot[i] < bot[i - 1]:
                size_of_top_div = top[i] - top[i - 1]
                size_of_bot_div = bot[i] - bot[i - 1]
                if left_top:
                    magnitudes[i] = (size_of_top_div, size_of_bot_div)
                else:
                    magnitudes[i] = (size_of_bot_div, size_of_top_div)

        return magnitudes

    # NOTE(review): decorator order differs from convergence_magnitudes()
    # (@clean_df last instead of first) -- confirm this is intentional.
    @measure
    @additive
    @clean_df
    def convergence_magnitudes_split(self) -> List[tuple]:
        """
        Return a list the length of the data with (0, 0) for every
        non-convergent sample and the (left, right) convergence magnitudes
        for convergent samples.  If the left hand is on top during a
        convergence, the first element is positive and the second negative.
        :return: list(tuple(float, float))
        """
        left_hand_data = self.left_hand_data()
        right_hand_data = self.right_hand_data()
        magnitudes = [(0, 0)] * len(right_hand_data)
        # Fix: the loop bound previously re-called self.right_hand_data()
        # instead of using the local already fetched above.
        for i in range(1, len(right_hand_data)):
            # Fix: the old code copied both full series on every iteration
            # (O(n^2) work); plain references suffice as nothing mutates.
            top, bot = left_hand_data, right_hand_data
            left_top = True
            if top[i] < bot[i]:  # right hand is on top at this sample
                left_top = False
                top, bot = bot, top
            # Converging: top decreasing while bot increasing.
            if top[i] < top[i - 1] and bot[i] > bot[i - 1]:
                size_of_top_con = top[i] - top[i - 1]
                size_of_bot_con = bot[i] - bot[i - 1]
                if left_top:
                    magnitudes[i] = (size_of_top_con, size_of_bot_con)
                else:
                    magnitudes[i] = (size_of_bot_con, size_of_top_con)

        return magnitudes

    @clean_df
    def convergence_count_bagged_against_time(
            self, number_of_bags: int) -> List[int]:
        """
        Count the convergence events falling into each of ``number_of_bags``
        equal time intervals.
        :param number_of_bags: int
        :return: List[int]
        """
        assert number_of_bags != 0
        magnitudes = self.convergence_magnitudes()
        total = len(magnitudes)
        counts = [0] * number_of_bags
        for index, magnitude in enumerate(magnitudes):
            if magnitude == 0:
                continue
            counts[int((index / total) * number_of_bags)] += 1
        return counts

    @clean_df
    def divergence_count_bagged_against_time(self,
                                             number_of_bags: int) -> List[int]:
        """
        Count the divergence events falling into each of ``number_of_bags``
        equal time intervals.
        :param number_of_bags: int
        :return: List[int]
        """
        assert number_of_bags != 0
        magnitudes = self.divergence_magnitudes()
        total = len(magnitudes)
        counts = [0] * number_of_bags
        for index, magnitude in enumerate(magnitudes):
            if magnitude == 0:
                continue
            counts[int((index / total) * number_of_bags)] += 1
        return counts

    # TODO redundant?
    @clean_df
    @measure
    def divergence_count_time_percentage(self) -> List[int]:
        """
        Count of divergence events for each time step, one step being
        duration/100 (i.e. one percent of the recording).
        :return: list[int]
        """
        percent_steps = 100
        return self.divergence_count_bagged_against_time(percent_steps)

    # TODO redundant?
    @clean_df
    @measure
    def convergence_count_time_percentage(self) -> List[int]:
        """
        Count of convergence events for each time step, one step being
        duration/100 (i.e. one percent of the recording).
        :return: list[int]
        """
        percent_steps = 100
        return self.convergence_count_bagged_against_time(percent_steps)

    @clean_df
    @measure
    @additive
    @round_by
    def variance_convergence_magnitudes(self) -> float:
        """
        Variance of the convergence magnitudes, excluding zero (i.e.
        non-convergent) samples.
        :return: float
        """
        # NOTE(review): np.var of an empty sequence yields NaN with a
        # RuntimeWarning -- confirm that is acceptable upstream.
        observed = [m for m in self.convergence_magnitudes() if m != 0]
        return np.var(observed)

    @clean_df
    @measure
    @additive
    @round_by
    def variance_divergence_magnitudes(self) -> float:
        """
        Variance of the divergence magnitudes, excluding zero (i.e.
        non-divergent) samples.
        :return: float
        """
        # NOTE(review): np.var of an empty sequence yields NaN with a
        # RuntimeWarning -- confirm that is acceptable upstream.
        observed = [m for m in self.divergence_magnitudes() if m != 0]
        return np.var(observed)

    @clean_df
    @measure
    @additive
    @round_by
    def median_convergence_magnitudes(self) -> float:
        """
        Median of the convergence magnitudes, excluding zero (i.e.
        non-convergent) samples.
        :return: float
        """
        observed = [m for m in self.convergence_magnitudes() if m != 0]
        return np.median(observed)

    @clean_df
    @measure
    @additive
    @round_by
    def median_divergence_magnitudes(self) -> float:
        """
        Median of the divergence magnitudes, excluding zero (i.e.
        non-divergent) samples.
        :return: float
        """
        observed = [m for m in self.divergence_magnitudes() if m != 0]
        return np.median(observed)

    @clean_df
    @measure
    @additive
    @round_by
    def mean_divergence_magnitudes(self) -> float:
        """
        Mean of the divergence magnitudes, excluding zero (i.e.
        non-divergent) samples.
        :return: float
        """
        observed = [m for m in self.divergence_magnitudes() if m != 0]
        return np.mean(observed)

    @clean_df
    @measure
    @additive
    @round_by
    def mean_convergence_magnitudes(self) -> float:
        """
        Mean of the convergence magnitudes, excluding zero (i.e.
        non-convergent) samples.
        :return: float
        """
        observed = [m for m in self.convergence_magnitudes() if m != 0]
        return np.mean(observed)

    @clean_df
    @measure
    @additive
    @round_by
    def convergence_score(self) -> float:
        """
        Percentage of samples that are convergence instances, i.e. where
        the left and right hand values move towards each other.
        :return: float
        """
        count = sum(1 for m in self.convergence_magnitudes() if m != 0)
        return (count / len(self.left_hand_data())) * 100

    @clean_df
    @measure
    @additive
    @round_by
    def divergence_score(self) -> float:
        """
        Percentage of samples that are divergence instances, i.e. where
        the left and right hand values move away from each other.
        :return: float
        """
        count = sum(1 for m in self.divergence_magnitudes() if m != 0)
        return (count / len(self.left_hand_data())) * 100

    @clean_df
    @measure
    @round_by
    def total_shift(self) -> float:
        """
        Return the net change in the gap between left and right hand from
        the first reading to the last.  The right hand is assumed to be on
        top.
        :return: float
        """
        return self.right_shift() - self.left_shift()

    @clean_df
    @measure
    @round_by
    def left_shift(self) -> float:
        """
        Shift in the left hand data from first value to last; positive
        means it shifted upwards.
        :return: float
        """
        series = self.left_hand_data()
        return series.iloc[-1] - series.iloc[0]

    @clean_df
    @measure
    @round_by
    def right_shift(self) -> float:
        """
        Shift in the right hand data from first value to last; positive
        means it shifted upwards.
        :return: float
        """
        series = self.right_hand_data()
        return series.iloc[-1] - series.iloc[0]

    @clean_df
    def bag_values_by_bands(self, bands: List[float],
                            column_header: str) -> List[List[float]]:
        """
        Bag the values of df's ``column_header`` column according to
        ``bands``.  Values below bands[i] (and not below any earlier band)
        go into bag i; values not below any band go into the final
        overflow bag.

        Bug fixes vs. the previous version:
        * ``[[]] * n`` created n references to ONE shared list, so every
          bag aliased the same data; bags are now distinct lists.
        * the overflow append executed for EVERY value (the ``break``
          only left the inner loop), double-counting banded values; it is
          now in the loop's ``else`` clause, so it only runs when no band
          matched.

        :param bands: List[float] must be sorted
        :param column_header: str must match a column header in df
        :return: List of len(bands) + 1 lists of values
        """
        bags = [[] for _ in range(len(bands) + 1)]
        df = self.df()
        for value in df[column_header]:
            for i, band in enumerate(bands):
                if float(value) < float(band):
                    bags[i].append(value)
                    break
            else:
                # Value is >= every band threshold: overflow bag.
                bags[-1].append(value)
        return bags

    @clean_df
    def count_bags(self, bags: List[List[float]]) -> List[int]:
        """
        Return the size of each bag, in the same order as ``bags``.
        :param bags: List[Lists of floats]
        :return: List[int]
        """
        return list(map(len, bags))

    @clean_df
    @measure
    def convergence_magnitudes_bagged_by_phase(self) -> List[float]:
        """
        Sum the convergence magnitudes per phase band.  The result has one
        entry per band in ``self.phase_bands`` plus a final overflow entry
        for phases above the highest band.
        :return: List[float]
        """
        band_limits = self.phase_bands.split(', ')
        # +1: overflow bag above the last threshold.
        bagged = [0] * (len(band_limits) + 1)
        phase_data = self.phase_data()
        for i, mag in enumerate(self.convergence_magnitudes()):
            if mag == 0:
                continue
            phase = phase_data[i]
            for j, band in enumerate(band_limits):
                if float(phase) < float(band):
                    bagged[j] += mag
                    break
            else:
                bagged[-1] += mag
        assert (len(bagged) == len(band_limits) + 1)
        return bagged

    @clean_df
    @measure
    def convergence_magnitudes_bagged_by_frequency(self) -> List[float]:
        """
        Sum the convergence magnitudes per frequency band.  The result has
        one entry per band in ``self.frequency_bands`` plus a final
        overflow entry for frequencies above the highest band.
        :return: List[float]
        """
        band_limits = self.frequency_bands.split(', ')
        # +1: overflow bag above the last threshold.
        bagged = [0] * (len(band_limits) + 1)
        frequency_data = self.frequency_data()
        for i, mag in enumerate(self.convergence_magnitudes()):
            if mag == 0:
                continue
            frequency = frequency_data[i]
            for j, band in enumerate(band_limits):
                if float(frequency) < float(band):
                    bagged[j] += mag
                    break
            else:
                bagged[-1] += mag
        assert (len(bagged) == len(band_limits) + 1)
        return bagged

    @clean_df
    @measure
    def divergence_magnitudes_bagged_by_phase(self) -> List[float]:
        """
        Sum the divergence magnitudes per phase band.  The result has one
        entry per band in ``self.phase_bands`` plus a final overflow entry
        for phases above the highest band.
        :return: List[float]
        """
        band_limits = self.phase_bands.split(', ')
        # +1: overflow bag above the last threshold.
        bagged = [0] * (len(band_limits) + 1)
        phase_data = self.phase_data()
        for i, mag in enumerate(self.divergence_magnitudes()):
            if mag == 0:
                continue
            phase = phase_data[i]
            for j, band in enumerate(band_limits):
                if float(phase) < float(band):
                    bagged[j] += mag
                    break
            else:
                bagged[-1] += mag
        assert (len(bagged) == len(band_limits) + 1)
        return bagged

    @clean_df
    @measure
    def divergence_magnitudes_bagged_by_frequency(self) -> List[float]:
        """
        Sum the divergence magnitudes per frequency band.  The result has
        one entry per band in ``self.frequency_bands`` plus a final
        overflow entry for frequencies above the highest band.
        :return: List[float]
        """
        band_limits = self.frequency_bands.split(', ')
        # +1: overflow bag above the last threshold.
        bagged = [0] * (len(band_limits) + 1)
        frequency_data = self.frequency_data()
        for i, mag in enumerate(self.divergence_magnitudes()):
            if mag == 0:
                continue
            frequency = frequency_data[i]
            for j, band in enumerate(band_limits):
                if float(frequency) < float(band):
                    bagged[j] += mag
                    break
            else:
                bagged[-1] += mag
        assert (len(bagged) == len(band_limits) + 1)
        return bagged
Example no. 24
0
class Profile(Model):
    """Per-user profile data, linked one-to-one to the auth ``User``."""

    # Deleting the user cascades to the profile.
    user = OneToOneField(User, on_delete=CASCADE)
    # Optional; camelCase name kept -- renaming would change the DB column.
    shoeSize = IntegerField(null=True, blank=True)
Example no. 25
0
class FiscalYear(Func):
    """
    SQL function expression extracting the fiscal year of a date column.

    Shifts the date forward by three months before taking the year, so
    the last quarter rolls into the next year -- presumably an
    October-to-September fiscal year; confirm against the deployment.
    """

    function = "EXTRACT"
    template = "%(function)s(YEAR from (%(expressions)s) + INTERVAL '3 months')"
    output_field = IntegerField()
Example no. 26
0
class Sensor(Model):
    """
    A single sensor attached to a parent device.

    Holds identity/metadata for the sensor plus a cached copy of its most
    recent reading (``last_value`` / ``last_timestamp``).
    """

    # Regex fragment for externally supplied sensor identifiers.
    IDPattern = "[-_:a-zA-Z0-9]+"

    s_id = IntegerField(primary_key=True)
    # Human-facing unique identifier, validated against IDPattern.
    sensor_id = CharField("Sensor ID",
                          unique = True,
                          max_length=60,
                          validators=[RegexValidator(regex="^%s$" % IDPattern)])
    sensor_type = ForeignKey(SensorType)
    parent_device = ForeignKey(Device)
    data_type = ForeignKey(DataType, default="TEST")
    description = CharField("Description", max_length=256)
    on_line = BooleanField(default=True)
    # Cached most-recent reading; may be stale (see get_current()).
    last_value = FloatField(null=True, blank=True)
    last_timestamp = DateTimeField(null=True, blank=True)

    def __unicode__(self):
        # Python 2 style string representation.
        return self.sensor_id

    def valid_input(self, input_value):
        # Input validation is delegated to the sensor's type.
        return self.sensor_type.valid_input(input_value)


    # Can specify *either* a start or number of values with keyword
    # arguments 'start' and 'num', but not both.
    def get_ts(self, num=None, start=None, end=None):
        return RawData.get_ts(self, num=num, start=start, end=end)


    def get_vectors(self, num=None, start=None, end=None):
        return RawData.get_vectors(self, num=num, start=start, end=end)

    def get_location(self):
        # Sensors inherit their location from the parent device.
        return self.parent_device.location
    

    def get_current(self, timeout_seconds):
        """
        Return the latest reading as a dict, or None if no data exists.

        If the newest reading is older than ``timeout_seconds`` seconds
        (and the timeout is positive), ``value`` is reported as None
        while the timestamp and location are still returned.
        """
        # NOTE(review): ``current`` is unused -- candidate for removal.
        current = {}
        data_value = DataValue.objects.select_related('data_info__timestamp').filter(data_info__sensor=self).order_by('data_info__timestamp__timestamp').last()
        if data_value is None:
            return None

        ts = data_value.data_info.timestamp.timestamp
        value = data_value.value
        # Treat readings older than the timeout as missing.
        if timeout_seconds > 0 and timezone.now() - ts > datetime.timedelta(seconds=timeout_seconds):
            value = None

        location = data_value.data_info.location
        return {"sensorid"  : self.sensor_id,
                "timestamp" : data_value.data_info.timestamp.timestamp,
                "value"     : value,
                "location"  : {"latitude" : location.latitude, "longitude" : location.longitude}}


    def valid_post_key(self, key_string):
        # Post keys belong to the device, not the individual sensor.
        return self.parent_device.valid_post_key(key_string)


    # This method returns a QuerySet - because that query set is
    # subsequently used to update the status of all the relevant
    # RawData records.
    def get_rawdata(self):
        qs = RawData.objects.filter(sensor_string_id=self.sensor_id,
                                    processed=False).values_list('id',
                                                                 'timestamp_data',
                                                                 'value').order_by('timestamp_data')

        return qs
Example no. 27
0
def conditional_sum(value=1, **cond):
    """Wrapper to generate SUM on boolean/enum values."""
    matched = When(then=value, **cond)
    case_expression = Case(matched, default=0, output_field=IntegerField())
    return Sum(case_expression)
Example no. 28
0
    def operation_to_expression(self, rule):
        """
        Recursively translate a JSON-logic style ``rule`` node into a
        Django ORM expression (or a plain Python value).

        Non-dict nodes are literals and are returned unchanged; a dict
        node has the shape ``{operator: operand-or-operand-list}``.

        :raises ValueError: on an unknown operator or 'buildTime' type.
        """
        if not isinstance(rule, dict):
            return rule

        operator = list(rule.keys())[0]
        values = rule[operator]

        # Normalise operands to a list so every branch below can index
        # into ``values`` uniformly.
        if not isinstance(values, (list, tuple)):
            values = [values]

        if operator == 'buildTime':
            if values[0] == "custom":
                return Value(
                    dateutil.parser.parse(values[1]).astimezone(pytz.UTC))
            elif values[0] == "customtime":
                # Apply the parsed time-of-day to "now" in the event's
                # timezone, then convert back to UTC.
                parsed = dateutil.parser.parse(values[1])
                return Value(now().astimezone(
                    self.list.event.timezone).replace(
                        hour=parsed.hour,
                        minute=parsed.minute,
                        second=parsed.second,
                        microsecond=parsed.microsecond,
                    ).astimezone(pytz.UTC))
            elif values[0] == 'date_from':
                # Subevent dates take precedence over the event's dates.
                # (Constant strings: the previous f-prefixes were no-ops.)
                return Coalesce(
                    F('subevent__date_from'),
                    F('order__event__date_from'),
                )
            elif values[0] == 'date_to':
                return Coalesce(
                    F('subevent__date_to'),
                    F('subevent__date_from'),
                    F('order__event__date_to'),
                    F('order__event__date_from'),
                )
            elif values[0] == 'date_admission':
                return Coalesce(
                    F('subevent__date_admission'),
                    F('subevent__date_from'),
                    F('order__event__date_admission'),
                    F('order__event__date_from'),
                )
            else:
                raise ValueError(f'Unknown time type {values[0]}')
        elif operator == 'objectList':
            return [self.operation_to_expression(v) for v in values]
        elif operator == 'lookup':
            return int(values[1])
        elif operator == 'var':
            # NOTE(review): an unknown variable name falls through and
            # implicitly returns None -- confirm that is intended.
            if values[0] == 'now':
                return Value(now().astimezone(pytz.UTC))
            elif values[0] == 'product':
                return F('item_id')
            elif values[0] == 'variation':
                return F('variation_id')
            elif values[0] == 'entries_number':
                # Total ENTRY check-ins for this position on this list.
                return Coalesce(Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        type=Checkin.TYPE_ENTRY,
                        list_id=self.list.pk).values('position_id').order_by().
                    annotate(c=Count('*')).values('c')),
                                Value(0),
                                output_field=IntegerField())
            elif values[0] == 'entries_today':
                # ENTRY check-ins since local midnight (event timezone).
                midnight = now().astimezone(self.list.event.timezone).replace(
                    hour=0, minute=0, second=0, microsecond=0)
                return Coalesce(Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        type=Checkin.TYPE_ENTRY,
                        list_id=self.list.pk,
                        datetime__gte=midnight,
                    ).values('position_id').order_by().annotate(
                        c=Count('*')).values('c')),
                                Value(0),
                                output_field=IntegerField())
            elif values[0] == 'entries_days':
                # Number of distinct local days with at least one entry.
                tz = self.list.event.timezone
                return Coalesce(Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        type=Checkin.TYPE_ENTRY,
                        list_id=self.list.pk,
                    ).annotate(day=TruncDate('datetime', tzinfo=tz)).values(
                        'position_id').order_by().annotate(
                            c=Count('day', distinct=True)).values('c')),
                                Value(0),
                                output_field=IntegerField())
        else:
            raise ValueError(f'Unknown operator {operator}')
Example no. 29
0
class SQLQuery(models.Model):
    """
    One SQL query captured during profiling, with timing information and
    the Python traceback that issued it.
    """

    query = TextField()
    start_time = DateTimeField(null=True, blank=True, default=timezone.now)
    end_time = DateTimeField(null=True, blank=True)
    # Milliseconds; derived from start/end in prepare_save().
    time_taken = FloatField(blank=True, null=True)
    identifier = IntegerField(default=-1)
    # Nullable: a query may be captured outside any tracked request.
    request = ForeignKey(
        Request,
        related_name='queries',
        null=True,
        blank=True,
        db_index=True,
        on_delete=models.CASCADE,
    )
    traceback = TextField()
    objects = SQLQueryManager()

    @property
    def traceback_ln_only(self):
        """Every other line of the traceback (the location lines only)."""
        return '\n'.join(self.traceback.split('\n')[::2])

    @property
    def formatted_query(self):
        """The query re-indented, with keywords upper-cased."""
        return sqlparse.format(self.query, reindent=True, keyword_case='upper')

    # TODO: Surely a better way to handle this? May return false positives
    @property
    def num_joins(self):
        return self.query.lower().count('join ')

    @property
    def tables_involved(self):
        """
        A rudimentary way to work out the tables involved in a query.
        TODO: Can probably parse the SQL using sqlparse etc and pull out table
        info that way?
        """
        components = [x.strip() for x in self.query.split()]
        tables = []

        for idx, component in enumerate(components):
            # TODO: If django uses aliases on column names they will be falsely
            # identified as tables...
            if component.lower() == 'from' or component.lower(
            ) == 'join' or component.lower() == 'as':
                try:
                    _next = components[idx + 1]
                    if not _next.startswith('('):  # Subquery
                        stripped = _next.strip().strip(',')

                        if stripped:
                            tables.append(stripped)
                except IndexError:  # Reach the end
                    pass
        return tables

    def prepare_save(self):
        """Derive time_taken and, on first save, bump the request counter."""
        if self.end_time and self.start_time:
            interval = self.end_time - self.start_time
            self.time_taken = interval.total_seconds() * 1000

        if not self.pk:
            if self.request:
                self.request.num_sql_queries += 1
                self.request.save(update_fields=['num_sql_queries'])

    @transaction.atomic()
    def save(self, *args, **kwargs):
        self.prepare_save()
        super(SQLQuery, self).save(*args, **kwargs)

    @transaction.atomic()
    def delete(self, *args, **kwargs):
        # Bug fix: ``request`` is nullable, but the counter was
        # decremented unconditionally, raising AttributeError for
        # request-less queries.  Mirror the guard (and the
        # update_fields usage) from prepare_save().
        if self.request:
            self.request.num_sql_queries -= 1
            self.request.save(update_fields=['num_sql_queries'])
        super(SQLQuery, self).delete(*args, **kwargs)
Example no. 30
0
class Subnet(CleanSave, TimestampedModel):
    """A network subnet (CIDR) attached to a VLAN.

    Tracks addressing metadata (gateway, DNS servers, reserved and dynamic
    IP ranges) and provides heuristics for allocating free IP addresses.
    """

    def __init__(self, *args, **kwargs):
        # 'space' was removed from Subnet; spaces now live on the VLAN
        # (see the `space` property shim below).
        assert 'space' not in kwargs, "Subnets can no longer be in spaces."
        super().__init__(*args, **kwargs)

    objects = SubnetManager()

    name = CharField(blank=False,
                     editable=True,
                     max_length=255,
                     validators=[SUBNET_NAME_VALIDATOR],
                     help_text="Identifying name for this subnet.")

    description = TextField(null=False, blank=True)

    vlan = ForeignKey('VLAN',
                      default=get_default_vlan,
                      editable=True,
                      blank=False,
                      null=False,
                      on_delete=PROTECT)

    # XXX:fabric: unique constraint should be relaxed once proper support for
    # fabrics is implemented. The CIDR must be unique within a Fabric, not
    # globally unique.
    cidr = CIDRField(blank=False, unique=True, editable=True, null=False)

    rdns_mode = IntegerField(choices=RDNS_MODE_CHOICES,
                             editable=True,
                             default=RDNS_MODE.DEFAULT)

    gateway_ip = MAASIPAddressField(blank=True, editable=True, null=True)

    dns_servers = ArrayField(TextField(),
                             blank=True,
                             editable=True,
                             null=True,
                             default=list)

    allow_dns = BooleanField(editable=True,
                             blank=False,
                             null=False,
                             default=True)

    allow_proxy = BooleanField(editable=True,
                               blank=False,
                               null=False,
                               default=True)

    active_discovery = BooleanField(editable=True,
                                    blank=False,
                                    null=False,
                                    default=False)

    managed = BooleanField(editable=True,
                           blank=False,
                           null=False,
                           default=True)

    @property
    def label(self):
        """Returns a human-friendly label for this subnet."""
        cidr = str(self.cidr)
        # Note: there is a not-NULL check for the 'name' field, so this only
        # applies to unsaved objects.
        if self.name is None or self.name == "":
            return cidr
        if cidr not in self.name:
            return "%s (%s)" % (self.name, self.cidr)
        else:
            return self.name

    @property
    def space(self):
        """Backward compatibility shim to get the space for this subnet."""
        return self.vlan.space

    def get_ipnetwork(self) -> IPNetwork:
        """Return this subnet's CIDR as a netaddr ``IPNetwork``."""
        return IPNetwork(self.cidr)

    def get_ip_version(self) -> int:
        """Return the IP version (4 or 6) of this subnet's CIDR."""
        return self.get_ipnetwork().version

    def update_cidr(self, cidr):
        """Set a new CIDR, keeping a CIDR-derived name in sync."""
        cidr = str(cidr)
        # If the old name had the CIDR embedded in it, update that first.
        if self.name:
            self.name = self.name.replace(str(self.cidr), cidr)
        else:
            self.name = cidr
        self.cidr = cidr

    def __str__(self):
        return "%s:%s(vid=%s)" % (self.name, self.cidr, self.vlan.vid)

    def validate_gateway_ip(self):
        """Raise ``ValidationError`` if ``gateway_ip`` is set but outside
        this subnet's network (IPv6 link-local gateways are permitted)."""
        if self.gateway_ip is None or self.gateway_ip == '':
            return
        gateway_addr = IPAddress(self.gateway_ip)
        network = self.get_ipnetwork()
        if gateway_addr in network:
            # If the gateway is in the network, it is fine.
            return
        elif network.version == 6 and gateway_addr.is_link_local():
            # If this is an IPv6 network and the gateway is in the link-local
            # network (fe80::/64 -- required to be configured by the spec),
            # then it is also valid.
            return
        else:
            # The gateway is not valid for the network.
            message = "Gateway IP must be within CIDR range."
            raise ValidationError({'gateway_ip': [message]})

    def clean_fields(self, *args, **kwargs):
        # XXX mpontillo 2016-03-16: this function exists due to bug #1557767.
        # This workaround exists to prevent potential unintended consequences
        # of making the name optional.
        if (self.name is None or self.name == '') and self.cidr is not None:
            self.name = str(self.cidr)
        super().clean_fields(*args, **kwargs)

    def clean(self, *args, **kwargs):
        # Model-level validation hook; currently only validates the gateway.
        self.validate_gateway_ip()

    def delete(self, *args, **kwargs):
        """Delete the subnet; refuse if DHCP is actively serving one of its
        dynamic ranges."""
        # Check if DHCP is enabled on the VLAN this subnet is attached to.
        if self.vlan.dhcp_on and self.get_dynamic_ranges().exists():
            raise ValidationError(
                "Cannot delete a subnet that is actively servicing a dynamic "
                "IP range. (Delete the dynamic range or disable DHCP first.)")
        super().delete(*args, **kwargs)

    def _get_ranges_for_allocated_ips(self, ipnetwork: IPNetwork,
                                      ignore_discovered_ips: bool) -> set:
        """Returns a set of MAASIPRange objects created from the set of allocated
        StaticIPAddress objects.
        """
        # Note, the original implementation used .exclude() to filter,
        # but we'll filter at runtime so that prefetch_related in the
        # websocket works properly.
        ranges = set()
        for sip in self.staticipaddress_set.all():
            if sip.ip and not (ignore_discovered_ips and
                               (sip.alloc_type == IPADDRESS_TYPE.DISCOVERED)):
                ip = IPAddress(sip.ip)
                if ip in ipnetwork:
                    ranges.add(make_iprange(ip, purpose="assigned-ip"))
        return ranges

    def get_ipranges_in_use(self,
                            exclude_addresses: IPAddressExcludeList = None,
                            ranges_only: bool = False,
                            include_reserved: bool = True,
                            with_neighbours: bool = False,
                            ignore_discovered_ips: bool = False,
                            exclude_ip_ranges: list = None) -> MAASIPSet:
        """Returns a `MAASIPSet` of `MAASIPRange` objects which are currently
        in use on this `Subnet`.

        :param exclude_addresses: Additional addresses to consider "in use".
        :param ignore_discovered_ips: DISCOVERED addresses are not "in use".
        :param ranges_only: if True, filters out gateway IPs, static routes,
            DNS servers, and `exclude_addresses`.
        :param include_reserved: if True, include RESERVED ranges as "in use".
        :param with_neighbours: If True, includes addresses learned from
            neighbour observation.
        :param exclude_ip_ranges: IPRange objects to omit from the result.
        """
        if exclude_addresses is None:
            exclude_addresses = []
        ranges = set()
        network = self.get_ipnetwork()
        if network.version == 6:
            # For most IPv6 networks, automatically reserve the range:
            #     ::1 - ::ffff:ffff
            # We expect the administrator will be using ::1 through ::ffff.
            # We plan to reserve ::1:0 through ::ffff:ffff for use by MAAS,
            # so that we can allocate addresses in the form:
            #     ::<node>:<child>
            # For now, just make sure IPv6 addresses are allocated from
            # *outside* both ranges, so that they won't conflict with addresses
            # reserved from this scheme in the future.
            first = str(IPAddress(network.first))
            first_plus_one = str(IPAddress(network.first + 1))
            second = str(IPAddress(network.first + 0xFFFFFFFF))
            if network.prefixlen == 64:
                ranges |= {
                    make_iprange(first_plus_one, second, purpose="reserved")
                }
            # Reserve the subnet router anycast address, except for /127 and
            # /128 networks. (See RFC 6164, and RFC 4291 section 2.6.1.)
            if network.prefixlen < 127:
                ranges |= {
                    make_iprange(first, first, purpose="rfc-4291-2.6.1")
                }
        ipnetwork = self.get_ipnetwork()
        if not ranges_only:
            if (self.gateway_ip is not None and self.gateway_ip != ''
                    and self.gateway_ip in ipnetwork):
                ranges |= {make_iprange(self.gateway_ip, purpose="gateway-ip")}
            if self.dns_servers is not None:
                ranges |= set(
                    make_iprange(server, purpose="dns-server")
                    for server in self.dns_servers if server in ipnetwork)
            for static_route in StaticRoute.objects.filter(source=self):
                ranges |= {
                    make_iprange(static_route.gateway_ip, purpose="gateway-ip")
                }
            ranges |= self._get_ranges_for_allocated_ips(
                ipnetwork, ignore_discovered_ips)
            ranges |= set(
                make_iprange(address, purpose="excluded")
                for address in exclude_addresses if address in network)
        if include_reserved:
            ranges |= self.get_reserved_maasipset(
                exclude_ip_ranges=exclude_ip_ranges)
        ranges |= self.get_dynamic_maasipset(
            exclude_ip_ranges=exclude_ip_ranges)
        if with_neighbours:
            ranges |= self.get_maasipset_for_neighbours()
        return MAASIPSet(ranges)

    def get_ipranges_available_for_reserved_range(
            self, exclude_ip_ranges: list = None):
        # Reserved ranges may overlap gateway/DNS addresses, hence ranges_only.
        return self.get_ipranges_not_in_use(
            ranges_only=True, exclude_ip_ranges=exclude_ip_ranges)

    def get_ipranges_available_for_dynamic_range(self,
                                                 exclude_ip_ranges: list = None
                                                 ):
        # Dynamic ranges must avoid infrastructure addresses, but DISCOVERED
        # addresses can be reclaimed.
        return self.get_ipranges_not_in_use(
            ranges_only=False,
            ignore_discovered_ips=True,
            exclude_ip_ranges=exclude_ip_ranges)

    def get_ipranges_not_in_use(self,
                                exclude_addresses: IPAddressExcludeList = None,
                                ranges_only: bool = False,
                                ignore_discovered_ips: bool = False,
                                with_neighbours: bool = False,
                                exclude_ip_ranges: list = None) -> MAASIPSet:
        """Returns a `MAASIPSet` of ranges which are currently free on this
        `Subnet`.

        :param ranges_only: if True, filters out gateway IPs, static routes,
            DNS servers, and `exclude_addresses`.
        :param exclude_addresses: An iterable of addresses not to use.
        :param ignore_discovered_ips: DISCOVERED addresses are not "in use".
        :param with_neighbours: If True, includes addresses learned from
            neighbour observation.
        """
        if exclude_addresses is None:
            exclude_addresses = []
        in_use = self.get_ipranges_in_use(
            exclude_addresses=exclude_addresses,
            ranges_only=ranges_only,
            with_neighbours=with_neighbours,
            ignore_discovered_ips=ignore_discovered_ips,
            exclude_ip_ranges=exclude_ip_ranges)
        if self.managed or ranges_only:
            not_in_use = in_use.get_unused_ranges(self.get_ipnetwork())
        else:
            # The end result we want is a list of unused IP addresses *within*
            # reserved ranges. To get that result, we first need the full list
            # of unused IP addresses on the subnet. This is better illustrated
            # visually below.
            #
            # Legend:
            #     X:  in-use IP addresses
            #     R:  reserved range
            #     Rx: reserved range (with allocated, in-use IP address)
            #
            #             +----+----+----+----+----+----+
            # IP address: | 1  | 2  | 3  | 4  | 5  | 6  |
            #             +----+----+----+----+----+----+
            #     Usages: | X  |    | R  | Rx |    | X  |
            #             +----+----+----+----+----+----+
            #
            # We need a set that just contains `3` in this case. To get there,
            # first calculate the set of all unused addresses on the subnet,
            # then intersect that set with set of in-use addresses *excluding*
            # the reserved range, then calculate which addresses within *that*
            # set are unused:
            #                               +----+----+----+----+----+----+
            #                   IP address: | 1  | 2  | 3  | 4  | 5  | 6  |
            #                               +----+----+----+----+----+----+
            #                       unused: |    | U  |    |    | U  |    |
            #                               +----+----+----+----+----+----+
            #             unmanaged_in_use: | u  |    |    | u  |    | u  |
            #                               +----+----+----+----+----+----+
            #                 |= unmanaged: ===============================
            #                               +----+----+----+----+----+----+
            #             unmanaged_in_use: | u  | U  |    | u  | U  | u  |
            #                               +----+----+----+----+----+----+
            #          get_unused_ranges(): ===============================
            #                               +----+----+----+----+----+----+
            #                   not_in_use: |    |    | n  |    |    |    |
            #                               +----+----+----+----+----+----+
            unused = in_use.get_unused_ranges(
                self.get_ipnetwork(), purpose=MAASIPRANGE_TYPE.UNMANAGED)
            unmanaged_in_use = self.get_ipranges_in_use(
                exclude_addresses=exclude_addresses,
                ranges_only=ranges_only,
                include_reserved=False,
                with_neighbours=with_neighbours,
                ignore_discovered_ips=ignore_discovered_ips,
                exclude_ip_ranges=exclude_ip_ranges)
            unmanaged_in_use |= unused
            not_in_use = unmanaged_in_use.get_unused_ranges(
                self.get_ipnetwork(), purpose=MAASIPRANGE_TYPE.UNUSED)
        return not_in_use

    def get_maasipset_for_neighbours(self) -> MAASIPSet:
        """Return the observed neighbours in this subnet.

        :return: MAASIPSet of neighbours (with the "neighbour" purpose).
        """
        # Circular imports.
        from maasserver.models import Discovery
        # Note: we only need unknown IP addresses here, because the known
        # IP addresses should already be covered by get_ipranges_in_use().
        neighbours = Discovery.objects.filter(subnet=self).by_unknown_ip()
        neighbour_set = {
            make_iprange(neighbour.ip, purpose="neighbour")
            for neighbour in neighbours
        }
        return MAASIPSet(neighbour_set)

    def get_least_recently_seen_unknown_neighbour(self):
        """
        Returns the least recently seen unknown neighbour of this subnet.

        Useful when allocating an IP address, to safeguard against assigning
        an address another host is still using.

        :return: a `maasserver.models.Discovery` object
        """
        # Circular imports.
        from maasserver.models import Discovery
        # Note: for the purposes of this function, being in part of a "used"
        # range (such as a router IP address or reserved range) makes it
        # "known". So we need to avoid those here in order to avoid stepping
        # on network infrastructure, reserved ranges, etc.
        unused = self.get_ipranges_not_in_use(ignore_discovered_ips=True)
        least_recent_neighbours = Discovery.objects.filter(
            subnet=self).by_unknown_ip().order_by('last_seen')
        for neighbor in least_recent_neighbours:
            if neighbor.ip in unused:
                return neighbor
        return None

    def get_iprange_usage(self, with_neighbours=False) -> MAASIPSet:
        """Returns both the reserved and unreserved IP ranges in this Subnet.
        (This prevents a potential race condition that could occur if an IP
        address is allocated or deallocated between calls.)

        :returns: a `MAASIPSet` covering the full network, combining the
            in-use ranges with the gaps between them.
        """
        reserved_ranges = self.get_ipranges_in_use()
        if with_neighbours is True:
            reserved_ranges |= self.get_maasipset_for_neighbours()
        return reserved_ranges.get_full_range(self.get_ipnetwork())

    def get_next_ip_for_allocation(
            self,
            exclude_addresses: Optional[Iterable] = None,
            avoid_observed_neighbours: bool = True):
        """Heuristic to return the "best" address from this subnet to use next.

        :param exclude_addresses: Optional list of addresses to exclude.
        :param avoid_observed_neighbours: Optional parameter to specify if
            known observed neighbours should be avoided. This parameter is not
            intended to be specified by a caller in production code; it is used
            internally to recursively call this method if the first allocation
            attempt fails.
        """
        if exclude_addresses is None:
            exclude_addresses = []
        free_ranges = self.get_ipranges_not_in_use(
            exclude_addresses=exclude_addresses,
            with_neighbours=avoid_observed_neighbours)
        if len(free_ranges) == 0 and avoid_observed_neighbours is True:
            # Try again recursively, but this time consider neighbours to be
            # "free" IP addresses. (We'll pick the least recently seen IP.)
            return self.get_next_ip_for_allocation(
                exclude_addresses, avoid_observed_neighbours=False)
        elif len(free_ranges) == 0:
            raise StaticIPAddressExhaustion(
                "No more IPs available in subnet: %s." % self.cidr)
        # The first time through this function, we aren't trying to avoid
        # observed neighbours. In fact, `free_ranges` only contains completely
        # unused ranges. So we don't need to check for the least recently seen
        # neighbour on the first pass.
        if avoid_observed_neighbours is False:
            # We tried considering neighbours as "in-use" addresses, but the
            # subnet is still full. So make an educated guess about which IP
            # address is least likely to be in-use.
            discovery = self.get_least_recently_seen_unknown_neighbour()
            if discovery is not None:
                maaslog.warning(
                    "Next IP address to allocate from '%s' has been observed "
                    "previously: %s was last claimed by %s via %s at %s." %
                    (self.label, discovery.ip, discovery.mac_address,
                     discovery.observer_interface.get_log_string(),
                     discovery.last_seen))
                return str(discovery.ip)
        # The purpose of this is to that we ensure we always get an IP address
        # from the *smallest* free contiguous range. This way, larger ranges
        # can be preserved in case they need to be used for applications
        # requiring them.
        free_range = min(free_ranges, key=attrgetter('num_addresses'))
        return str(IPAddress(free_range.first))

    def render_json_for_related_ips(self,
                                    with_username=True,
                                    with_summary=True):
        """Render a representation of this subnet's related IP addresses,
        suitable for converting to JSON. Optionally exclude user and node
        information."""
        ip_addresses = self.staticipaddress_set.all()
        if with_username:
            ip_addresses = ip_addresses.prefetch_related('user')
        if with_summary:
            ip_addresses = ip_addresses.prefetch_related(
                'interface_set',
                'interface_set__node',
                'bmc_set',
                'bmc_set__node_set',
                'dnsresource_set',
                'dnsresource_set__domain',
            )
        return sorted([
            ip.render_json(with_username=with_username,
                           with_summary=with_summary)
            for ip in ip_addresses if ip.ip
        ],
                      key=lambda json: IPAddress(json['ip']))

    def get_dynamic_ranges(self):
        """Return a queryset of this subnet's DYNAMIC `IPRange` objects."""
        return self.iprange_set.filter(type=IPRANGE_TYPE.DYNAMIC)

    def get_reserved_ranges(self):
        """Return a queryset of this subnet's RESERVED `IPRange` objects."""
        return self.iprange_set.filter(type=IPRANGE_TYPE.RESERVED)

    def is_valid_static_ip(self, *args, **kwargs):
        """Validates that the requested IP address is acceptable for allocation
        in this `Subnet` (assuming it has not already been allocated).

        Returns `True` if the IP address is acceptable, and `False` if not.

        Does not consider whether or not the IP address is already allocated,
        only whether or not it is in the proper network and range.

        :return: bool
        """
        try:
            self.validate_static_ip(*args, **kwargs)
        except MAASAPIException:
            return False
        return True

    def validate_static_ip(self, ip):
        """Validates that the requested IP address is acceptable for allocation
        in this `Subnet` (assuming it has not already been allocated).

        Raises `StaticIPAddressUnavailable` if the address is not acceptable.

        Does not consider whether or not the IP address is already allocated,
        only whether or not it is in the proper network and range.

        :raises StaticIPAddressUnavailable: If the IP address specified is not
            available for allocation.
        """
        if ip not in self.get_ipnetwork():
            raise StaticIPAddressOutOfRange(
                "%s is not within subnet CIDR: %s" % (ip, self.cidr))
        for iprange in self.get_reserved_maasipset():
            if ip in iprange:
                raise StaticIPAddressUnavailable(
                    "%s is within the reserved range from %s to %s" %
                    (ip, IPAddress(iprange.first), IPAddress(iprange.last)))
        for iprange in self.get_dynamic_maasipset():
            if ip in iprange:
                raise StaticIPAddressUnavailable(
                    "%s is within the dynamic range from %s to %s" %
                    (ip, IPAddress(iprange.first), IPAddress(iprange.last)))

    def get_reserved_maasipset(self, exclude_ip_ranges: list = None):
        """Return a `MAASIPSet` of this subnet's reserved ranges, omitting
        any that appear in `exclude_ip_ranges`."""
        if exclude_ip_ranges is None:
            exclude_ip_ranges = []
        reserved_ranges = MAASIPSet(iprange.get_MAASIPRange()
                                    for iprange in self.get_reserved_ranges()
                                    if iprange not in exclude_ip_ranges)
        return reserved_ranges

    def get_dynamic_maasipset(self, exclude_ip_ranges: list = None):
        """Return a `MAASIPSet` of this subnet's dynamic ranges, omitting
        any that appear in `exclude_ip_ranges`."""
        if exclude_ip_ranges is None:
            exclude_ip_ranges = []
        dynamic_ranges = MAASIPSet(iprange.get_MAASIPRange()
                                   for iprange in self.get_dynamic_ranges()
                                   if iprange not in exclude_ip_ranges)
        return dynamic_ranges

    def get_dynamic_range_for_ip(self, ip):
        """Return `IPRange` for the provided `ip`."""
        # XXX mpontillo 2016-01-21: for some reason this query doesn't work.
        # I tried it both like this, and with:
        #     start_ip__gte=ip, and end_ip__lte=ip
        # return get_one(self.get_dynamic_ranges().extra(
        #        where=["start_ip >= inet '%s'" % ip,
        # ... which sounds a lot like comment 15 in:
        #     https://code.djangoproject.com/ticket/11442
        for iprange in self.get_dynamic_ranges():
            if ip in iprange.netaddr_iprange:
                return iprange
        return None

    def get_smallest_enclosing_sane_subnet(self):
        """Return the subnet that includes this subnet.

        It must also be at least big enough to be a parent in the RFC2317
        world (/24 in IPv4, /124 in IPv6).

        If no such subnet exists, return None.
        """
        find_rfc2137_parent_query = """
            SELECT * FROM maasserver_subnet
            WHERE
                %s << cidr AND (
                    (family(cidr) = 6 and masklen(cidr) <= 124) OR
                    (family(cidr) = 4 and masklen(cidr) <= 24))
            ORDER BY
                masklen(cidr) DESC
            LIMIT 1
            """
        for s in Subnet.objects.raw(find_rfc2137_parent_query, (self.cidr, )):
            return s
        return None

    def update_allocation_notification(self):
        """Create, update, or delete the per-subnet IP-exhaustion warning
        notification, based on the configured threshold."""
        # Workaround for edge cases in Django. (See bug #1702527.)
        if self.id is None:
            return
        ident = "ip_exhaustion__subnet_%d" % self.id
        # Circular imports.
        from maasserver.models import Config, Notification
        threshold = Config.objects.get_config(
            'subnet_ip_exhaustion_threshold_count')
        notification = Notification.objects.filter(ident=ident).first()
        delete_notification = False
        if threshold > 0:
            full_iprange = self.get_iprange_usage()
            statistics = IPRangeStatistics(full_iprange)
            # Check if there are less available IPs in the subnet than the
            # warning threshold.
            meets_warning_threshold = statistics.num_available <= threshold
            # Check if the warning threshold is appropriate relative to the
            # size of the subnet. It's pointless to warn about address
            # exhaustion on a /30, for example: the admin already knows it's
            # small, so we would just be annoying them.
            subnet_is_reasonably_large_relative_to_threshold = (
                threshold * 3 <= statistics.total_addresses)
            if (meets_warning_threshold
                    and subnet_is_reasonably_large_relative_to_threshold):
                notification_text = (
                    "IP address exhaustion imminent on subnet: %s. "
                    "There are %d free addresses out of %d "
                    "(%s used).") % (self.label, statistics.num_available,
                                     statistics.total_addresses,
                                     statistics.usage_percentage_string)
                if notification is None:
                    Notification.objects.create_warning_for_admins(
                        notification_text, ident=ident)
                else:
                    # Note: This will update the notification, but will not
                    # bring it back for those who have dismissed it. Maybe we
                    # should consider creating a new notification if the
                    # situation is now more severe, such as raise it to an
                    # error if it's half remaining threshold.
                    notification.message = notification_text
                    notification.save()
            else:
                delete_notification = True
        else:
            delete_notification = True
        if notification is not None and delete_notification:
            notification.delete()
Esempio n. 31
0

# Implementation of a custom lookup, exactly as documented, for teaching purposes

class Funcao(Lookup):
    """Custom lookup emitting a SQL "not equal" (``<>``) comparison.

    Mirrors the documented Django custom-lookup example; registered on
    ``Field`` below so it is usable as ``__func`` on any field.
    """
    lookup_name = 'func'

    def as_sql(self, qn, connection):
        # Compile both sides of the comparison, then concatenate their bind
        # parameters in LHS-then-RHS order, matching the SQL placeholders.
        # (A stray Python-2 debug `print params` statement was removed here;
        # it was a syntax error under Python 3.)
        lhs, lhs_params = self.process_lhs(qn, connection)
        rhs, rhs_params = self.process_rhs(qn, connection)
        params = lhs_params + rhs_params
        return '%s <> %s' % (lhs, rhs), params

# Register the custom lookup with Django so any Field supports
# e.g. ``Model.objects.filter(column__func=value)``.
Field.register_lookup(Funcao)

# Implementation of a transform.
# Some changes were made relative to the documentation
# to fit the template.

class IdadeMenor(Transform):
    lookup_name = 'idmenor'

    def as_sql(self, qn, connection):
        lhs, params = qn.compile(self.lhs)
        return "ABS(%s)" % lhs, params

# Avisando o Django sobre a implementacao da transformacao
IntegerField.register_lookup(IdadeMenor)
Esempio n. 32
0
    def iterate_orders(self, form_data: dict):
        """Yield the order-export spreadsheet row by row.

        Yields the header row first, then a ``ProgressSetTotal`` marker,
        then one data row per order of ``self.events`` sorted by
        creation datetime.

        :param form_data: export options. ``paid_only`` restricts the
            export to paid orders; ``include_payment_amounts`` adds one
            extra column per payment method.
        """
        # Subquery: latest confirmed/refunded payment date per order.
        p_date = OrderPayment.objects.filter(
            order=OuterRef('pk'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                       OrderPayment.PAYMENT_STATE_REFUNDED),
            payment_date__isnull=False).values('order').annotate(
                m=Max('payment_date')).values('m').order_by()
        # Subquery: comma-separated payment providers per order.
        p_providers = OrderPayment.objects.filter(
            order=OuterRef('pk'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                       OrderPayment.PAYMENT_STATE_REFUNDED,
                       OrderPayment.PAYMENT_STATE_PENDING,
                       OrderPayment.PAYMENT_STATE_CREATED),
        ).values('order').annotate(
            m=GroupConcat('provider', delimiter=',')).values('m').order_by()
        # Subquery: comma-separated invoice numbers per order.
        i_numbers = Invoice.objects.filter(
            order=OuterRef('pk'), ).values('order').annotate(m=GroupConcat(
                'full_invoice_no', delimiter=', ')).values('m').order_by()

        # Subquery: number of order positions per order.
        s = OrderPosition.objects.filter(
            order=OuterRef('pk')).order_by().values('order').annotate(
                k=Count('id')).values('k')
        qs = Order.objects.filter(event__in=self.events).annotate(
            payment_date=Subquery(p_date, output_field=DateTimeField()),
            payment_providers=Subquery(p_providers, output_field=CharField()),
            invoice_numbers=Subquery(i_numbers, output_field=CharField()),
            pcnt=Subquery(
                s,
                output_field=IntegerField())).select_related('invoice_address')
        if form_data['paid_only']:
            qs = qs.filter(status=Order.STATUS_PAID)
        tax_rates = self._get_all_tax_rates(qs)

        # Build the header row. Column order must stay in sync with the
        # per-order row construction below.
        headers = [
            _('Event slug'),
            _('Order code'),
            _('Order total'),
            _('Status'),
            _('Email'),
            _('Phone number'),
            _('Order date'),
            _('Order time'),
            _('Company'),
            _('Name'),
        ]
        # Per-event name schemes only make sense for single-event exports.
        name_scheme = PERSON_NAME_SCHEMES[
            self.event.settings.
            name_scheme] if not self.is_multievent else None
        if name_scheme and len(name_scheme['fields']) > 1:
            for k, label, w in name_scheme['fields']:
                headers.append(label)
        headers += [
            _('Address'),
            _('ZIP code'),
            _('City'),
            _('Country'),
            pgettext('address', 'State'),
            _('Custom address field'),
            _('VAT ID'),
            _('Date of last payment'),
            _('Fees'),
            _('Order locale')
        ]

        for tr in tax_rates:
            headers += [
                _('Gross at {rate} % tax').format(rate=tr),
                _('Net at {rate} % tax').format(rate=tr),
                _('Tax value at {rate} % tax').format(rate=tr),
            ]

        headers.append(_('Invoice numbers'))
        headers.append(_('Sales channel'))
        headers.append(_('Requires special attention'))
        headers.append(_('Comment'))
        headers.append(_('Positions'))
        headers.append(_('Payment providers'))
        if form_data.get('include_payment_amounts'):
            payment_methods = self._get_all_payment_methods(qs)
            for pm_id, vn in payment_methods:
                headers.append(_('Paid by {method}').format(method=vn))

        yield headers

        # Pre-aggregate fee/payment/position sums per order so the main
        # loop below does no additional queries per order.
        full_fee_sum_cache = {
            o['order__id']: o['grosssum']
            for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
            ).annotate(grosssum=Sum('value'))
        }
        fee_sum_cache = {
            (o['order__id'], o['tax_rate']): o
            for o in OrderFee.objects.values('tax_rate', 'order__id').order_by(
            ).annotate(taxsum=Sum('tax_value'), grosssum=Sum('value'))
        }
        if form_data.get('include_payment_amounts'):
            payment_sum_cache = {
                (o['order__id'], o['provider']): o['grosssum']
                for o in OrderPayment.objects.values(
                    'provider', 'order__id').order_by().filter(state__in=[
                        OrderPayment.PAYMENT_STATE_CONFIRMED,
                        OrderPayment.PAYMENT_STATE_REFUNDED
                    ]).annotate(grosssum=Sum('amount'))
            }
            refund_sum_cache = {
                (o['order__id'], o['provider']): o['grosssum']
                for o in OrderRefund.objects.values(
                    'provider', 'order__id').order_by().filter(state__in=[
                        OrderRefund.REFUND_STATE_DONE,
                        OrderRefund.REFUND_STATE_TRANSIT
                    ]).annotate(grosssum=Sum('amount'))
            }
        sum_cache = {
            (o['order__id'], o['tax_rate']): o
            for o in OrderPosition.objects.values('tax_rate', 'order__id').
            order_by().annotate(taxsum=Sum('tax_value'), grosssum=Sum('price'))
        }

        yield self.ProgressSetTotal(total=qs.count())
        for order in qs.order_by('datetime').iterator():
            # Render date/time columns in the order's event timezone.
            tz = pytz.timezone(
                self.event_object_cache[order.event_id].settings.timezone)

            row = [
                self.event_object_cache[order.event_id].slug,
                order.code,
                order.total,
                order.get_status_display(),
                order.email,
                str(order.phone) if order.phone else '',
                order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
                order.datetime.astimezone(tz).strftime('%H:%M:%S'),
            ]
            try:
                row += [
                    order.invoice_address.company,
                    order.invoice_address.name,
                ]
                if name_scheme and len(name_scheme['fields']) > 1:
                    for k, label, w in name_scheme['fields']:
                        row.append(order.invoice_address.name_parts.get(k, ''))
                row += [
                    order.invoice_address.street,
                    order.invoice_address.zipcode,
                    order.invoice_address.city,
                    order.invoice_address.country
                    if order.invoice_address.country else
                    order.invoice_address.country_old,
                    order.invoice_address.state,
                    order.invoice_address.custom_field,
                    order.invoice_address.vat_id,
                ]
            except InvoiceAddress.DoesNotExist:
                # Keep the column count stable when no address exists.
                row += [''] * (9 + (len(name_scheme['fields']) if name_scheme
                                    and len(name_scheme['fields']) > 1 else 0))

            row += [
                order.payment_date.astimezone(tz).strftime('%Y-%m-%d')
                if order.payment_date else '',
                full_fee_sum_cache.get(order.id) or Decimal('0.00'),
                order.locale,
            ]

            # Gross / net / tax columns per tax rate, combining order
            # positions and order fees.
            for tr in tax_rates:
                taxrate_values = sum_cache.get((order.id, tr), {
                    'grosssum': Decimal('0.00'),
                    'taxsum': Decimal('0.00')
                })
                fee_taxrate_values = fee_sum_cache.get(
                    (order.id, tr), {
                        'grosssum': Decimal('0.00'),
                        'taxsum': Decimal('0.00')
                    })

                row += [
                    taxrate_values['grosssum'] +
                    fee_taxrate_values['grosssum'],
                    (taxrate_values['grosssum'] - taxrate_values['taxsum'] +
                     fee_taxrate_values['grosssum'] -
                     fee_taxrate_values['taxsum']),
                    taxrate_values['taxsum'] + fee_taxrate_values['taxsum'],
                ]

            row.append(order.invoice_numbers)
            row.append(order.sales_channel)
            row.append(_('Yes') if order.checkin_attention else _('No'))
            row.append(order.comment or "")
            row.append(order.pcnt)
            row.append(', '.join([
                str(self.providers.get(p, p))
                for p in sorted(set((
                    order.payment_providers or '').split(',')))
                if p and p != 'free'
            ]))

            if form_data.get('include_payment_amounts'):
                # Bug fix: this used to call self._get_all_payment_methods(qs)
                # again for every single order; the result is loop-invariant
                # and already bound above under the same condition, so reuse it.
                for pm_id, vn in payment_methods:
                    row.append(
                        payment_sum_cache.get((order.id,
                                               pm_id), Decimal('0.00')) -
                        refund_sum_cache.get((order.id, pm_id), Decimal('0.00')))
            yield row
Esempio n. 33
0
 def __init__(self, *args, **kwargs):
     """Construct the field, defaulting ``default`` to 0.

     Generalized to accept the standard Django ``Field`` arguments
     (``*args``/``**kwargs``) so the field cooperates with field
     deconstruction and migrations; a bare ``__init__()`` call still
     behaves exactly as before (``default=0``).
     """
     kwargs.setdefault('default', 0)
     # NOTE(review): the direct IntegerField.__init__ call is kept
     # (rather than super()) because the enclosing class header is not
     # visible here — confirm the MRO before switching to super().
     IntegerField.__init__(self, *args, **kwargs)
Esempio n. 34
0
    def get(self, request, format=None, **kwargs):
        """Return month-grouped statistics for the requester's district.

        Depending on ``request.path`` this serves either mapping-encounter
        statistics (girls mapped per sub-county and age group) or delivery
        statistics, one dict per month within the requested range.

        Query params ``from``/``to`` are ``YYYY-MM-DD`` dates, both
        inclusive (one day is added to ``to`` below).
        """
        get_params = dict(zip(request.GET.keys(), request.GET.values()))
        date_format = '%Y-%m-%d'

        created_at_from = datetime.datetime.strptime(
            get_params['from'], date_format).replace(tzinfo=pytz.utc)
        # One extra day so that the "to" date itself is included.
        created_at_to_limit = datetime.datetime.strptime(get_params['to'], date_format).replace(tzinfo=pytz.utc) \
                              + timezone.timedelta(days=1)

        all_months_range_data = []
        first_date_range = True
        subcounty = request.user.village.parish.sub_county
        district = subcounty.county.district
        sub_counties = SubCounty.objects.filter(county__district=district)

        while created_at_from <= created_at_to_limit:
            '''We loop through all months for the data querried.
            we do some mutation on the dates so as to group the data in months '''
            created_at_to = self.generate_date_range(created_at_from,
                                                     created_at_to_limit,
                                                     first_date_range)

            all_subcounties = []

            if request.path == '/api/v1/mapping_encounters_stats':
                total_girls_in_all_subcounties = 0
                response = dict()
                response["district"] = district.name
                response["year"] = created_at_from.year
                response["month"] = created_at_from.strftime("%B")

                # Girls mapped in this month, grouped by sub-county.
                subvalues = SubCounty.objects.annotate(girls_count=Sum(
                    Case(
                        When(Q(parish__village__girl__created_at__gte=created_at_from) & Q(
                            parish__village__girl__created_at__lte=created_at_to), then=1),
                        output_field=IntegerField())), ).exclude(girls_count=None) \
                    .values('name', 'girls_count').filter(county__district=district)

                for subcounty in subvalues:
                    response["totalNumberOfGirlsMappedFrom" +
                             subcounty['name']] = subcounty['girls_count']
                    total_girls_in_all_subcounties += subcounty['girls_count']

                # Age-bucket counts for the same month and district.
                girls = Girl.objects.aggregate(
                    girls_count_12_15=Sum(
                        # Bug fix: the lower bound was a duplicated
                        # Q(age__lte=15); the 12-15 bucket needs age__gte=12,
                        # matching the pattern of the other buckets.
                        Case(When(Q(age__gte=12) & Q(age__lte=15)
                                  & Q(user__district=district)
                                  & Q(created_at__gte=created_at_from)
                                  & Q(created_at__lte=created_at_to),
                                  then=1),
                             output_field=IntegerField())),
                    girls_count_16_19=Sum(
                        Case(When(Q(age__gte=16) & Q(age__lte=19)
                                  & Q(user__district=district)
                                  & Q(created_at__gte=created_at_from)
                                  & Q(created_at__lte=created_at_to),
                                  then=1),
                             output_field=IntegerField())),
                    girls_count_20_24=Sum(
                        Case(When(Q(age__gte=20) & Q(age__lte=24)
                                  & Q(user__district=district)
                                  & Q(created_at__gte=created_at_from)
                                  & Q(created_at__lte=created_at_to),
                                  then=1),
                             output_field=IntegerField())))

                response["mappedGirlsInAgeGroup12_15"] = girls[
                    'girls_count_12_15']
                response["mappedGirlsInAgeGroup16_19"] = girls[
                    'girls_count_16_19']
                response["mappedGirlsInAgeGroup20_24"] = girls[
                    'girls_count_20_24']
                response["count"] = total_girls_in_all_subcounties
                response["subcounties"] = [
                    subcounty.name for subcounty in sub_counties
                ]

                all_months_range_data.append(response)
            elif request.path == '/api/v1/deliveries_stats':
                response = dict()
                deliveries = Delivery.objects.filter(
                    Q(girl__created_at__gte=created_at_from)
                    & Q(girl__created_at__lte=created_at_to))

                all_subcounties += [
                    delivery.girl.village.parish.sub_county
                    for delivery in deliveries
                    if delivery.user.district == district
                ]

                # remove duplicate subcounties
                all_subcounties = list(set(all_subcounties))

                response["subcounties"] = [
                    subcounty.name for subcounty in all_subcounties
                ]

                all_deliveries_total = 0

                for subcounty in all_subcounties:
                    # NOTE(review): this count has no created_at filter, so it
                    # counts ALL deliveries of the sub-county in every month's
                    # bucket — looks unintended, confirm before changing.
                    delivery_total = Delivery.objects.filter(
                        girl__village__parish_id__in=[
                            parish.id for parish in subcounty.parish_set.all()
                        ]).count()
                    response["deliveriesFromSubcounty" +
                             subcounty.name] = delivery_total
                    all_deliveries_total += delivery_total

                response["count"] = all_deliveries_total

                all_months_range_data.append(response)
            created_at_from = created_at_to + timezone.timedelta(days=1)
            first_date_range = False

        return Response(all_months_range_data, 200)