def contribute_to_class(self, model, name):
        super(MongoUserManager, self).contribute_to_class(model, name)
        self.dj_model = self.model
        self.model = get_user_document()

        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        for field_name in self.dj_model.REQUIRED_FIELDS:
            field = CharField(_(field_name), max_length=30)
            field.contribute_to_class(self.dj_model, field_name)

        is_staff = BooleanField(_('is_staff'), default=False)
        is_staff.contribute_to_class(self.dj_model, 'is_staff')

        is_active = BooleanField(_('is_active'), default=False)
        is_active.contribute_to_class(self.dj_model, 'is_active')

        is_superuser = BooleanField(_('is_superuser'), default=False)
        is_superuser.contribute_to_class(self.dj_model, 'is_superuser')

        last_login = DateTimeField(_('last_login'), auto_now_add=True)
        last_login.contribute_to_class(self.dj_model, 'last_login')

        date_joined = DateTimeField(_('date_joined'), auto_now_add=True)
        date_joined.contribute_to_class(self.dj_model, 'date_joined')
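A note on the pattern above: contribute_to_class is the hook Django's model metaclass calls for every field declared on a model; calling it by hand, as this manager does, bolts extra columns onto an already-defined model class. A minimal sketch of the same trick, assuming a configured Django project (the Profile model and nickname field are hypothetical):

from django.db import models

class Profile(models.Model):          # hypothetical stand-in model
    class Meta:
        app_label = 'example'

# contribute_to_class() registers the field on Profile._meta exactly as if
# it had been declared in the class body; the manager above relies on this.
nickname = models.CharField('nickname', max_length=30, blank=True)
nickname.contribute_to_class(Profile, 'nickname')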
Example #2
class Url(Model):
    short_id = SlugField(primary_key=True)
    url = URLField(max_length=200)
    pub_date = DateTimeField(auto_now=True)
    is_expired = BooleanField(default=False)
    redirect_count = IntegerField(default=0)
Example #3
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        tz = timezone.get_current_timezone()

        if 'latest' in self.request.GET:
            clear_cache(self.request.event)

        subevent = None
        if self.request.GET.get("subevent", "") != "" and self.request.event.has_subevents:
            i = self.request.GET.get("subevent", "")
            try:
                subevent = self.request.event.subevents.get(pk=i)
            except SubEvent.DoesNotExist:
                pass

        cache = self.request.event.cache
        ckey = str(subevent.pk) if subevent else 'all'

        p_date = OrderPayment.objects.filter(
            order=OuterRef('pk'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED),
            payment_date__isnull=False
        ).values('order').annotate(
            m=Max('payment_date')
        ).values(
            'm'
        ).order_by()
        op_date = OrderPayment.objects.filter(
            order=OuterRef('order'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED),
            payment_date__isnull=False
        ).values('order').annotate(
            m=Max('payment_date')
        ).values(
            'm'
        ).order_by()

        # Orders by day
        ctx['obd_data'] = cache.get('statistics_obd_data' + ckey)
        if not ctx['obd_data']:
            oqs = Order.objects.annotate(payment_date=Subquery(p_date, output_field=DateTimeField()))
            if subevent:
                oqs = oqs.filter(all_positions__subevent_id=subevent, all_positions__canceled=False).distinct()

            ordered_by_day = {}
            for o in oqs.filter(event=self.request.event).values('datetime'):
                day = o['datetime'].astimezone(tz).date()
                ordered_by_day[day] = ordered_by_day.get(day, 0) + 1
            paid_by_day = {}
            for o in oqs.filter(event=self.request.event, payment_date__isnull=False).values('payment_date'):
                day = o['payment_date'].astimezone(tz).date()
                paid_by_day[day] = paid_by_day.get(day, 0) + 1

            data = []
            for d in dateutil.rrule.rrule(
                    dateutil.rrule.DAILY,
                    dtstart=min(ordered_by_day.keys()) if ordered_by_day else datetime.date.today(),
                    until=max(
                        max(ordered_by_day.keys() if ordered_by_day else [datetime.date.today()]),
                        max(paid_by_day.keys() if paid_by_day else [datetime.date(1970, 1, 1)])
                    )):
                d = d.date()
                data.append({
                    'date': d.strftime('%Y-%m-%d'),
                    'ordered': ordered_by_day.get(d, 0),
                    'paid': paid_by_day.get(d, 0)
                })

            ctx['obd_data'] = json.dumps(data)
            cache.set('statistics_obd_data' + ckey, ctx['obd_data'])

        # Orders by product
        ctx['obp_data'] = cache.get('statistics_obp_data' + ckey)
        if not ctx['obp_data']:
            opqs = OrderPosition.objects
            if subevent:
                opqs = opqs.filter(subevent=subevent)
            num_ordered = {
                p['item']: p['cnt']
                for p in (opqs
                          .filter(order__event=self.request.event)
                          .values('item')
                          .annotate(cnt=Count('id')).order_by())
            }
            num_paid = {
                p['item']: p['cnt']
                for p in (opqs
                          .filter(order__event=self.request.event, order__status=Order.STATUS_PAID)
                          .values('item')
                          .annotate(cnt=Count('id')).order_by())
            }
            item_names = {
                i.id: str(i)
                for i in Item.objects.filter(event=self.request.event)
            }
            ctx['obp_data'] = json.dumps([
                {
                    'item': item_names[item],
                    'item_short': item_names[item] if len(item_names[item]) < 15 else (item_names[item][:15] + "…"),
                    'ordered': cnt,
                    'paid': num_paid.get(item, 0)
                } for item, cnt in num_ordered.items()
            ])
            cache.set('statistics_obp_data' + ckey, ctx['obp_data'])

        ctx['rev_data'] = cache.get('statistics_rev_data' + ckey)
        if not ctx['rev_data']:
            rev_by_day = {}
            if subevent:
                for o in OrderPosition.objects.annotate(
                        payment_date=Subquery(op_date, output_field=DateTimeField())
                ).filter(order__event=self.request.event,
                         subevent=subevent,
                         order__status=Order.STATUS_PAID,
                         payment_date__isnull=False).values('payment_date', 'price'):
                    day = o['payment_date'].astimezone(tz).date()
                    rev_by_day[day] = rev_by_day.get(day, 0) + o['price']
            else:
                for o in Order.objects.annotate(
                        payment_date=Subquery(p_date, output_field=DateTimeField())
                ).filter(event=self.request.event,
                         status=Order.STATUS_PAID,
                         payment_date__isnull=False).values('payment_date', 'total'):
                    day = o['payment_date'].astimezone(tz).date()
                    rev_by_day[day] = rev_by_day.get(day, 0) + o['total']

            data = []
            total = 0
            for d in dateutil.rrule.rrule(
                    dateutil.rrule.DAILY,
                    dtstart=min(rev_by_day.keys() if rev_by_day else [datetime.date.today()]),
                    until=max(rev_by_day.keys() if rev_by_day else [datetime.date.today()])):
                d = d.date()
                total += float(rev_by_day.get(d, 0))
                data.append({
                    'date': d.strftime('%Y-%m-%d'),
                    'revenue': round(total, 2),
                })
            ctx['rev_data'] = json.dumps(data)
            cache.set('statistics_rev_data' + ckey, ctx['rev_data'])

        ctx['has_orders'] = self.request.event.orders.exists()

        ctx['seats'] = {}

        if not self.request.event.has_subevents or (ckey != "all" and subevent):
            ev = subevent or self.request.event
            if ev.seating_plan_id is not None:
                seats_qs = ev.free_seats(sales_channel=None, include_blocked=True)
                ctx['seats']['blocked_seats'] = seats_qs.filter(blocked=True).count()
                ctx['seats']['free_seats'] = seats_qs.filter(blocked=False).count()
                ctx['seats']['purchased_seats'] = \
                    ev.seats.count() - ctx['seats']['blocked_seats'] - ctx['seats']['free_seats']

                seats_qs = seats_qs.values('product', 'blocked').annotate(count=Count('id'))\
                    .order_by('product__category__position', 'product__position', 'product', 'blocked')

                ctx['seats']['products'] = {}
                ctx['seats']['stats'] = {}
                item_cache = {i.pk: i for i in
                              self.request.event.items.annotate(has_variations=Count('variations')).filter(
                                  pk__in={p['product'] for p in seats_qs if p['product']}
                              )}
                item_cache[None] = None

                for item in seats_qs:
                    product = item_cache[item['product']]
                    if product not in ctx['seats']['products']:
                        price = None
                        if product and product.has_variations:
                            price = product.variations.filter(
                                active=True
                            ).aggregate(Min('default_price'))['default_price__min']
                        if product and not price:
                            price = product.default_price
                        if not price:
                            price = Decimal('0.00')

                        ctx['seats']['products'][product] = {
                            'free': {
                                'seats': 0,
                                'potential': Decimal('0.00'),
                            },
                            'blocked': {
                                'seats': 0,
                                'potential': Decimal('0.00'),
                            },
                            'price': price,
                        }
                    data = ctx['seats']['products'][product]

                    if item['blocked']:
                        data['blocked']['seats'] = item['count']
                        data['blocked']['potential'] = item['count'] * data['price']
                    else:
                        data['free']['seats'] = item['count']
                        data['free']['potential'] = item['count'] * data['price']

        return ctx
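The p_date and op_date querysets above follow the standard correlated-subquery idiom: OuterRef('pk') ties each inner OrderPayment row to the outer order, .values('order').annotate(...) collapses the subquery to one aggregated row per order, and the trailing empty .order_by() strips any default ordering that would break the grouping. A generic sketch of the same idiom, with hypothetical Author/Book models:

from django.db.models import DateTimeField, Max, OuterRef, Subquery

last_published = Book.objects.filter(
    author=OuterRef('pk')              # correlate with the outer Author row
).values('author').annotate(           # group to one row per author
    m=Max('published')
).values('m').order_by()               # clear default ordering

authors = Author.objects.annotate(
    last_published=Subquery(last_published, output_field=DateTimeField())
)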
Example #4
    def iterate_orders(self, form_data: dict):
        p_date = OrderPayment.objects.filter(
            order=OuterRef('pk'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED),
            payment_date__isnull=False
        ).values('order').annotate(
            m=Max('payment_date')
        ).values(
            'm'
        ).order_by()
        p_providers = OrderPayment.objects.filter(
            order=OuterRef('pk'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED,
                       OrderPayment.PAYMENT_STATE_PENDING, OrderPayment.PAYMENT_STATE_CREATED),
        ).values('order').annotate(
            m=GroupConcat('provider', delimiter=',')
        ).values(
            'm'
        ).order_by()
        i_numbers = Invoice.objects.filter(
            order=OuterRef('pk'),
        ).values('order').annotate(
            m=GroupConcat('full_invoice_no', delimiter=', ')
        ).values(
            'm'
        ).order_by()

        s = OrderPosition.objects.filter(
            order=OuterRef('pk')
        ).order_by().values('order').annotate(k=Count('id')).values('k')
        qs = Order.objects.filter(event__in=self.events).annotate(
            payment_date=Subquery(p_date, output_field=DateTimeField()),
            payment_providers=Subquery(p_providers, output_field=CharField()),
            invoice_numbers=Subquery(i_numbers, output_field=CharField()),
            pcnt=Subquery(s, output_field=IntegerField())
        ).select_related('invoice_address')

        qs = self._date_filter(qs, form_data, rel='')

        if form_data['paid_only']:
            qs = qs.filter(status=Order.STATUS_PAID)
        tax_rates = self._get_all_tax_rates(qs)

        headers = [
            _('Event slug'), _('Order code'), _('Order total'), _('Status'), _('Email'), _('Phone number'), _('Order date'),
            _('Order time'), _('Company'), _('Name'),
        ]
        name_scheme = PERSON_NAME_SCHEMES[self.event.settings.name_scheme] if not self.is_multievent else None
        if name_scheme and len(name_scheme['fields']) > 1:
            for k, label, w in name_scheme['fields']:
                headers.append(label)
        headers += [
            _('Address'), _('ZIP code'), _('City'), _('Country'), pgettext('address', 'State'),
            _('Custom address field'), _('VAT ID'), _('Date of last payment'), _('Fees'), _('Order locale')
        ]

        for tr in tax_rates:
            headers += [
                _('Gross at {rate} % tax').format(rate=tr),
                _('Net at {rate} % tax').format(rate=tr),
                _('Tax value at {rate} % tax').format(rate=tr),
            ]

        headers.append(_('Invoice numbers'))
        headers.append(_('Sales channel'))
        headers.append(_('Requires special attention'))
        headers.append(_('Comment'))
        headers.append(_('Positions'))
        headers.append(_('Payment providers'))
        if form_data.get('include_payment_amounts'):
            payment_methods = self._get_all_payment_methods(qs)
            for pid, vn in payment_methods:
                headers.append(_('Paid by {method}').format(method=vn))

        yield headers

        full_fee_sum_cache = {
            o['order__id']: o['grosssum'] for o in
            OrderFee.objects.values('order__id').order_by().annotate(grosssum=Sum('value'))
        }
        fee_sum_cache = {
            (o['order__id'], o['tax_rate']): o for o in
            OrderFee.objects.values('tax_rate', 'order__id').order_by().annotate(
                taxsum=Sum('tax_value'), grosssum=Sum('value')
            )
        }
        if form_data.get('include_payment_amounts'):
            payment_sum_cache = {
                (o['order__id'], o['provider']): o['grosssum'] for o in
                OrderPayment.objects.values('provider', 'order__id').order_by().filter(
                    state__in=[OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED]
                ).annotate(
                    grosssum=Sum('amount')
                )
            }
            refund_sum_cache = {
                (o['order__id'], o['provider']): o['grosssum'] for o in
                OrderRefund.objects.values('provider', 'order__id').order_by().filter(
                    state__in=[OrderRefund.REFUND_STATE_DONE, OrderRefund.REFUND_STATE_TRANSIT]
                ).annotate(
                    grosssum=Sum('amount')
                )
            }
        sum_cache = {
            (o['order__id'], o['tax_rate']): o for o in
            OrderPosition.objects.values('tax_rate', 'order__id').order_by().annotate(
                taxsum=Sum('tax_value'), grosssum=Sum('price')
            )
        }

        yield self.ProgressSetTotal(total=qs.count())
        for order in qs.order_by('datetime').iterator():
            tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)

            row = [
                self.event_object_cache[order.event_id].slug,
                order.code,
                order.total,
                order.get_status_display(),
                order.email,
                str(order.phone) if order.phone else '',
                order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
                order.datetime.astimezone(tz).strftime('%H:%M:%S'),
            ]
            try:
                row += [
                    order.invoice_address.company,
                    order.invoice_address.name,
                ]
                if name_scheme and len(name_scheme['fields']) > 1:
                    for k, label, w in name_scheme['fields']:
                        row.append(
                            order.invoice_address.name_parts.get(k, '')
                        )
                row += [
                    order.invoice_address.street,
                    order.invoice_address.zipcode,
                    order.invoice_address.city,
                    order.invoice_address.country if order.invoice_address.country else
                    order.invoice_address.country_old,
                    order.invoice_address.state,
                    order.invoice_address.custom_field,
                    order.invoice_address.vat_id,
                ]
            except InvoiceAddress.DoesNotExist:
                row += [''] * (9 + (len(name_scheme['fields']) if name_scheme and len(name_scheme['fields']) > 1 else 0))

            row += [
                order.payment_date.astimezone(tz).strftime('%Y-%m-%d') if order.payment_date else '',
                full_fee_sum_cache.get(order.id) or Decimal('0.00'),
                order.locale,
            ]

            for tr in tax_rates:
                taxrate_values = sum_cache.get((order.id, tr), {'grosssum': Decimal('0.00'), 'taxsum': Decimal('0.00')})
                fee_taxrate_values = fee_sum_cache.get((order.id, tr),
                                                       {'grosssum': Decimal('0.00'), 'taxsum': Decimal('0.00')})

                row += [
                    taxrate_values['grosssum'] + fee_taxrate_values['grosssum'],
                    (
                        taxrate_values['grosssum'] - taxrate_values['taxsum'] +
                        fee_taxrate_values['grosssum'] - fee_taxrate_values['taxsum']
                    ),
                    taxrate_values['taxsum'] + fee_taxrate_values['taxsum'],
                ]

            row.append(order.invoice_numbers)
            row.append(order.sales_channel)
            row.append(_('Yes') if order.checkin_attention else _('No'))
            row.append(order.comment or "")
            row.append(order.pcnt)
            row.append(', '.join([
                str(self.providers.get(p, p)) for p in sorted(set((order.payment_providers or '').split(',')))
                if p and p != 'free'
            ]))

            if form_data.get('include_payment_amounts'):
                # payment_methods was computed once above; avoid re-querying per order
                for pid, vn in payment_methods:
                    row.append(
                        payment_sum_cache.get((order.id, pid), Decimal('0.00')) -
                        refund_sum_cache.get((order.id, pid), Decimal('0.00'))
                    )
            yield row
Example #5
class PredictDataset(TimeStampedModel):
    """An uploaded predict dataset"""
    BASE_DIR = settings.TB_SHARED_DATAFILE_DIRECTORY

    FILE_TYPE_VCF = 'vcf'
    FILE_TYPE_FASTQ = 'fastq'
    FILE_TYPE_FASTQ2 = 'fastq-pair'
    FILE_TYPE_MANUAL = 'manual'
    FILE_TYPES = [
        (FILE_TYPE_VCF, 'Variant Call Format (VCF)'),
        (FILE_TYPE_FASTQ, 'FastQ Single Ended Nucleotide Sequence'),
        (FILE_TYPE_FASTQ2, 'FastQ Pair Ended Nucleotide Sequences'),
        (FILE_TYPE_MANUAL, 'Mutations Manual Entry'),
    ]

    user = ForeignKey(settings.AUTH_USER_MODEL,
                      related_name='datasets',
                      null=True,
                      on_delete=SET_NULL)
    md5 = CharField(max_length=40, blank=True, db_index=True,
                    help_text='auto-filled on save')
    title = CharField('Dataset Title', max_length=255)
    file_type = CharField(choices=FILE_TYPES, max_length=25)
    delete_sources = BooleanField(default=False,
        help_text="If this is checked, we will delete all your input files"
                  " downloaded from dropbox after running the predict.")
    has_notified = BooleanField(default=False,
        help_text="Has this predict messaged the user about the status of their jobs.")

    description = TextField('Dataset description')
    file_directory = CharField(max_length=255, blank=True)

    status = CharField('Status',
                       max_length=10,
                       default='',
                       choices=STATUS_CHOICES)
    strains_count = IntegerField("Number of strains total", default=0)
    strains_ready = IntegerField("Number of strains ready", default=0)
    last_action = DateTimeField("Last Completed Action", null=True, blank=True)

    has_prediction = BooleanField('Has prediction', default=False)
    has_lineages = BooleanField('Has lineages', default=False)
    has_output_files = BooleanField('Has any output files', default=False)

    class Meta:
        ordering = ('-created', 'title')

    def __str__(self):
        return str(self.title)

    @property
    def parent(self):
        from .views import Datasets
        return Datasets

    def get_absolute_url(self):
        """Return a link to thedataset view"""
        if self.md5:
            return reverse('predict:view_single_dataset',
                           kwargs=dict(slug=self.md5))
        return '/'

    def get_status(self):
        """Returns the status as a string"""
        return STATUS[self.status][0]

    def get_status_level(self):
        """Returns the btn/bootstrap color level for this status"""
        return STATUS[self.status][1]

    def update_status(self):
        """Update all the cumulative statuses"""
        total = 0
        previous = 100
        statuses = defaultdict(int)

        self.status = 'RUN_NONE'
        self.has_prediction = False
        self.has_lineages = False
        self.has_output_files = False
        self.last_action = None

        for strain in self.strains.all():
            statuses[strain.status] += 1
            total += 1

            if STATUS[strain.status][2] < previous:
                self.status = strain.status
                previous = STATUS[self.status][2]

            if not self.has_prediction and strain.has_prediction:
                self.has_prediction = True

            if not self.has_lineages and strain.has_lineage:
                self.has_lineages = True

            if not self.has_output_files and list(strain.output_files):
                self.has_output_files = True

            try:
                dt = strain.get_time_taken()
                if not self.last_action or (dt and dt > self.last_action):
                    self.last_action = dt
            except ValueError:
                pass

        self.strains_count = total
        self.strains_ready = statuses.get('READY', 0)

    @property
    def statuses(self):
        """Calculate the strains here"""
        # We may want to combine this with the above status update somehow.
        statuses = defaultdict(int)
        for strain in self.strains.all():
            statuses[strain.status] += 1

        if not statuses:
            statuses['RUN_NONE'] = 1

        return [{
            'code': status,
            'count': count,
            'label': STATUS[status][0],
            'level': STATUS[status][1],
            'pc': "{:.2f}".format(count / self.strains_count * 100),
        } for (status, count) in statuses.items()]

    @property
    def directory_exists(self):
        """Returns true if the file_directory exists"""
        return os.path.isdir(self.file_directory)

    @property
    def time_taken(self):
        if not self.last_action:
            return '-'
        # TODO: awkward way to format time
        # removes microseconds from output
        return str(self.last_action - self.created).split('.')[0]

    def is_manual(self):
        """Return true if this dataset is a manual input (rather than a file based input)"""
        return self.file_type == 'manual'

    def get_heatmap(self):
        """Return data in the heatmap format with embeded graphs"""
        output = {
            'rows': [],
            'cols': [],
        }
        for strain in self.strains.filter(results__isnull=True):
            try:
                strain.generate_results()
            except PredictParsingError as err:
                output['rows'].append({'name': strain, 'error': str(err)})
                continue

        err_q = Q(drug__isnull=True)
        strains = defaultdict(list)
        qset = PredictResult.objects.filter(strain__dataset=self)
        vset = qset.exclude(err_q).values_list(
            'id', 'strain__name', 'drug__code',
            'false_positive', 'false_negative', 'probability', 'prediction')

        errors = dict(qset.filter(err_q).values_list('strain__name', 'error'))

        # First pass: register every strain and build the full column list of
        # drugs, padding already-seen strains whenever a new drug appears.
        for pk, strain, drug, fpos, fneg, prob, pred in vset:
            cols = strains[strain]  # defaultdict access registers the strain
            if drug not in output['cols']:
                output['cols'].append(drug)
                for ocol in strains.values():
                    if len(ocol) < len(output['cols']):
                        ocol.append(None)

        # Second pass: fill in each strain's cell for its drug column.
        for pk, strain, drug, fpos, fneg, prob, pred in vset:
            index = output['cols'].index(drug)
            cols = strains[strain]
            cols.extend([None] * (len(output['cols']) - len(cols)))
            cols[index] = {
                'result_id': pk,
                'name': drug,
                'false_positive': fpos,
                'false_negative': fneg,
                'dr_probability': prob,
                'dr_prediction': pred,
            }
        for strain, cols in strains.items():
            for x, drug in enumerate(output['cols']):
                if x >= len(cols) or not cols[x] or not cols[x].get(
                        'name', None):
                    cols[x] = {'result_id': None, 'name': drug}
            output['rows'].append({'name': strain, 'cols': cols})
        for strain, err in errors.items():
            if strain not in strains:
                output['rows'].append({'name': strain, 'error': err})

        return output

    def user_name(self):
        """Return the uploader's username"""
        if self.user:
            return self.user.username
        return 'n/a'

    def user_affiliation(self):
        """Return the uploader's addiliation"""
        if self.user:
            return self.user.affiliation
        return 'n/a'

    def user_email(self):
        """Return the uploader's email address"""
        if self.user:
            return self.user.email
        return 'n/a'

    def save(self, *args, **kwargs):
        """Override the save function to populate some fields"""
        if not self.id:
            # First save to obtain a primary key (used for md5 and directory)
            super(PredictDataset, self).save(*args, **kwargs)

        if not self.md5:
            key = '{}{}'.format(self.id, self.title)
            self.md5 = md5(key.encode('utf8')).hexdigest()

        if not self.file_directory:
            # We make a new directory in the BASE_DIR based on this object's
            # primary-key ID padded with zeros to make them fixed width.
            job_name = 'tbdata_' + str(self.id).zfill(8)
            self.file_directory = join(self.BASE_DIR, job_name)
            if not isdir(self.file_directory):
                os.makedirs(self.file_directory)

        self.update_status()

        return super(PredictDataset, self).save(*args, **kwargs)

    def get_full_json(self):
        """Returns the dataset as a serialised json"""
        return serializers.serialize('json',
                                     PredictDataset.objects.filter(id=self.id))

    def lineages(self):
        """Get a table of lineages"""
        header = []
        results = []
        for strain in self.strains.all():
            columns = [''] * len(header)
            for name, lineage in strain.lineages():
                if name in header:
                    columns[header.index(name)] = lineage
                else:
                    header.append(name)
                    columns.append(lineage)
            results.append({
                'strain': str(strain),
                'cols': columns,
            })
        for row in results:
            row['cols'] += [''] * (len(header) - len(row['cols']))
        return {
            'header': header,
            'rows': results,
        }
Example #6
class Survey(Model):
    """
    read_only: if True, keep the original data; if False, the data may be modified.
    """

    farmer_id = CharField(max_length=12, verbose_name=_('Farmer Id'))
    farmer_name = CharField(null=True,
                            blank=True,
                            max_length=10,
                            verbose_name=_('Name'))
    total_pages = IntegerField(verbose_name=_('Total Pages'))
    page = IntegerField(verbose_name=_('Page'))
    origin_class = IntegerField(null=True,
                                blank=True,
                                verbose_name=_('Origin Class'))
    hire = BooleanField(default=False, verbose_name=_('Hire'))
    non_hire = BooleanField(default=False, verbose_name=_('Non Hire'))
    lacks = ManyToManyField('surveys18.Lack',
                            blank=True,
                            related_name='surveys18',
                            verbose_name=_('Lack'))
    management_types = ManyToManyField('surveys18.ManagementType',
                                       blank=True,
                                       related_name='surveys',
                                       verbose_name=_('Management Types'))
    note = TextField(null=True, blank=True, verbose_name=_('Note'))
    is_updated = BooleanField(default=False, verbose_name=_('Is Updated'))
    readonly = BooleanField(default=True, verbose_name=_('Read Only'))

    investigator = ForeignKey(settings.AUTH_USER_MODEL,
                              null=True,
                              blank=True,
                              on_delete=CASCADE,
                              verbose_name=_('Investigator'))
    date = DateField(null=True,
                     blank=True,
                     verbose_name=_('Investigation Date'))
    distance = IntegerField(null=True,
                            blank=True,
                            verbose_name=_('Investigation Distance(km)'))
    period = IntegerField(null=True,
                          blank=True,
                          verbose_name=_('Investigation Period'))

    update_time = DateTimeField(auto_now=True,
                                null=True,
                                blank=True,
                                verbose_name=_('Updated'))

    review_logs = GenericRelation(ReviewLog, related_query_name='survey')

    class Meta:
        verbose_name = _('Survey')
        verbose_name_plural = _('Survey')

    def __str__(self):
        return self.farmer_id

    def __unicode__(self):
        return self.farmer_id
Example #7
 def __init__(self, *args, **kwargs):
     kwargs.setdefault('editable', False)
     kwargs.setdefault('blank', True)
     kwargs.setdefault('default', datetime_now)
     DateTimeField.__init__(self, *args, **kwargs)
Example #8
 def __init__(self, *args, **kwargs):
     kwargs.setdefault("editable", False)
     kwargs.setdefault("blank", True)
     kwargs.setdefault("auto_now_add", True)
     DateTimeField.__init__(self, *args, **kwargs)
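Both __init__ overrides above are the usual shape of self-configuring timestamp fields (django-extensions' CreationDateTimeField and ModificationDateTimeField look much like this). A minimal, self-contained sketch of the pattern, mirroring Example #7:

from django.db import models
from django.utils.timezone import now as datetime_now


class CreationDateTimeField(models.DateTimeField):
    # Stores the creation time; hidden from forms and admin by default.
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('blank', True)
        kwargs.setdefault('default', datetime_now)
        super().__init__(*args, **kwargs)


class Article(models.Model):
    class Meta:
        app_label = 'example'          # hypothetical app

    created = CreationDateTimeField()  # no arguments needed at the call site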
Example #9
    def changelist_view(self, request, extra_context=None):
        response = super(ClosingDistributionAdmin, self).changelist_view(
            request,
            extra_context=extra_context,
        )

        try:
            qs = response.context_data['cl'].queryset.filter(closed=True)
        except (AttributeError, KeyError):
            return response

        metrics = {
            'total': Count('id'),
            'total_sale_price': Sum(
                F('ri_proceeds') + F('city_proceeds') + F('city_loan_proceeds')
            ),
            'total_city_proceeds': Sum('city_proceeds'),
            'total_city_loan_proceeds': Sum('city_loan_proceeds'),
            'total_ri_proceeds': Sum('ri_proceeds'),
        }

        response.context_data['summary'] = list(
            qs.values('application__application_type').annotate(
                **metrics).order_by('-application__application_type'))

        response.context_data['summary_total'] = dict(qs.aggregate(**metrics))

        period = get_next_in_date_hierarchy(
            request,
            self.date_hierarchy,
        )

        response.context_data['scale'] = period
        summary_over_time = qs.annotate(
            period=Trunc('date_time', period, output_field=DateTimeField()),
        ).values('period').annotate(
            total=Sum(F('ri_proceeds') + F('city_proceeds') + F('city_loan_proceeds')),
            count=Count('id'),
        ).order_by('period')

        summary_range = summary_over_time.aggregate(
            low=Min('count'),
            high=Max('count'),
        )
        high = summary_range.get('high', 0)
        low = summary_range.get('low', 0)

        response.context_data['summary_over_time'] = [{
            'period': x['period'],
            'total': x['total'] or 0,
            'count': x['count'],
            'high': high,
            'low': low,
            'pct': float(x['count']) / high * 100 if high else 0,
        } for x in summary_over_time]

        return response
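get_next_in_date_hierarchy() is not shown in this example; a plausible implementation (an assumption, not this project's verbatim code) picks the Trunc granularity one level below the admin's current date-hierarchy drill-down:

def get_next_in_date_hierarchy(request, date_hierarchy):
    # Assumed helper: drill one level deeper than the current filter.
    if date_hierarchy + '__day' in request.GET:
        return 'hour'
    if date_hierarchy + '__month' in request.GET:
        return 'day'
    if date_hierarchy + '__year' in request.GET:
        return 'week'
    return 'month'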
Example #10
class MonitorQueueElement(Model):
    sequence = ForeignKey(Sequence, on_delete=CASCADE)
    queued = DateTimeField(auto_now=True)
    monitor_setting = CharField(max_length=1, choices=MONITOR_SETTINGS)
Example #11
File: admin.py Project: JRubics/ISA
    def changelist_view(self, request, extra_context=None):
        response = super().changelist_view(
            request,
            extra_context=extra_context,
        )

        try:
            qs = response.context_data['cl'].queryset
        except (AttributeError, KeyError):
            return response

        metrics = {
            'total': Count('id'),
            'total_sales': Sum('price'),
        }

        if not request.user.is_superuser:
            qs = qs.filter(
                flight__avio_company=request.user.adminuser.avio_admin)

        row = list(qs.values('flight').annotate(**metrics).order_by('flight'))

        flights = qs.values('flight').annotate(
            **metrics).order_by('flight').values('flight')
        flights = Flight.objects.filter(pk__in=flights)
        flight_summary = zip(flights, row)
        response.context_data['flight_summary'] = flight_summary

        response.context_data['summary_total'] = dict(qs.aggregate(**metrics))

        period = get_next_in_date_hierarchy(request, self.date_hierarchy)
        response.context_data['period'] = period

        summary_over_time = qs.annotate(
            period=Trunc('time', period, output_field=DateTimeField()),
        ).values('period').annotate(total=Sum('price')).order_by('period')
        tickets_sold = qs.annotate(
            period=Trunc('time', period, output_field=DateTimeField()),
        ).values('period').annotate(total=Count('id')).order_by('period')

        if period == "month":
            response.context_data['label_list'] = json.dumps([
                'January', 'February', 'March', 'April', 'May', 'June', 'July',
                'August', 'September', 'October', 'November', 'December'
            ])
            val_list = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            tic_list = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            for x in summary_over_time:
                val_list[int(x['period'].strftime('%m')) - 1] = x['total']
            response.context_data['val_list'] = json.dumps(val_list)

            for x in tickets_sold:
                tic_list[int(x['period'].strftime('%m')) - 1] = x['total']
            response.context_data['tic_list'] = json.dumps(tic_list)

            response.context_data['week'] = True
            week_label_list = []
            for x in range(1, 53):
                week_label_list.append(str(x) + "week")
            response.context_data['week_label_list'] = json.dumps(
                week_label_list)
            summary_over_time = qs.annotate(
                period=Trunc('time', 'week', output_field=DateTimeField()),
            ).values('period').annotate(total=Sum('price')).order_by('period')
            tickets_sold = qs.annotate(
                period=Trunc('time', 'week', output_field=DateTimeField()),
            ).values('period').annotate(total=Count('id')).order_by('period')

            week_val_list = [0] * 52
            week_tic_list = [0] * 52
            for x in summary_over_time:
                week_val_list[int(x['period'].strftime('%U'))] = x['total']
            response.context_data['week_val_list'] = json.dumps(week_val_list)
            for x in tickets_sold:
                week_tic_list[int(x['period'].strftime('%U'))] = x['total']
            response.context_data['week_tic_list'] = json.dumps(week_tic_list)

        elif period == "day":
            response.context_data['week'] = False
            label_list = []
            for x in range(1, 32):
                label_list.append(str(x))
            response.context_data['label_list'] = json.dumps(label_list)

            val_list = [0] * 31
            tic_list = [0] * 31
            for x in summary_over_time:
                val_list[int(x['period'].strftime('%d')) - 1] = x['total']
            response.context_data['val_list'] = json.dumps(val_list)
            for x in tickets_sold:
                tic_list[int(x['period'].strftime('%d')) - 1] = x['total']
            response.context_data['tic_list'] = json.dumps(tic_list)

        return response
Example #12
class Transaction(Model):
    """
    Transaction:
        Scorebot Score Base

        Defines a Base Python Class object for tracking and managing score types, results and values. Allows for
        tracking of the "score stack", which is a history of all Transactions for a Team over time.

                Subclasses Must Define:
            save        ()
            __json__    ()
            __score__   ()
            __string__  ()
    """
    class Meta:
        verbose_name = '[Score] Transaction'
        verbose_name_plural = '[Score] Transaction'

    value = IntegerField('Transaction Value', default=0)
    when = DateTimeField('Transaction Date/Time', auto_now_add=True)
    previous = OneToOneField('self', null=True, blank=True, on_delete=SET_NULL)
    source = ForeignKey('scorebot_db.ScoreTeam',
                        on_delete=CASCADE,
                        related_name='score_source')
    destination = ForeignKey('scorebot_db.ScoreTeam',
                             on_delete=CASCADE,
                             related_name='score_destination')
    subclass = SmallIntegerField('Team SubClass',
                                 default=None,
                                 null=True,
                                 editable=False,
                                 choices=SCORE_SUBCLASS)

    def log(self):
        # Log the Score to a Flat File (Triggered on Saves).
        #
        # Columns
        # Value, Type, ISO When, Path From, Path To, Score
        Scoring.info('%d,%s,%s,%s,%s,%d' %
                     (self.get_score(), self.get_name(), self.when.isoformat(),
                      self.source.get_path(), self.destination.get_path(),
                      self.destination.get_score()))

    def name(self):
        return str(self.__subclass__().__class__.__name__)

    def json(self):
        return self.__subclass__().__json__()

    def score(self):
        return self.__subclass__().__score__()

    def stack(self):
        total = 0
        score = self
        stack = list()
        while score is not None:
            stack.append(score.json())
            total += score.score()
            score = next(score)  # walk the 'previous' chain (see __next__)
        return {'stack': stack, 'total': total}

    def total(self):
        if self.previous is not None:
            return self.score() + self.previous.score()
        return self.score()

    def reverse(self):
        transaction = new(self.name(), save=False)
        transaction.when = self.when
        transaction.subclass = self.subclass
        transaction.value = self.score() * -1
        transaction.destination = self.source
        transaction.source = self.destination
        transaction.save()
        return transaction

    def __str__(self):
        return self.__subclass__().__string__()

    def __len__(self):
        return abs(self.score())

    def __next__(self):
        return self.previous

    def __bool__(self):
        return self.score() > 0

    def __json__(self):
        return {
            'type': self.name(),
            'value': self.get_score(),
            'when': self.when.isoformat(),
            'source': self.source.name,
            'destination': self.destination.name
        }

    def __score__(self):
        return self.value

    def __string__(self):
        return '[Transaction] (%s) %d: %s -> %s' % (
            self.when.strftime('%m/%d/%y %H:%M'), self.value,
            self.source.path(), self.destination.path())

    def __subclass__(self):
        if self.subclass == SCORE_SUBCLASS_TRANSACTION or \
                self.__class__.__name__ == self.get_subclass_display():
            return self
        if self.subclass == SCORE_SUBCLASS_PAYMENT:
            return self.payment
        if self.subclass == SCORE_SUBCLASS_TRANSFER:
            return self.transfer
        if self.subclass == SCORE_SUBCLASS_PURCHASE:
            return self.purchase
        if self.subclass == SCORE_SUBCLASS_CORRECTION:
            return self.correction
        if self.subclass == SCORE_SUBCLASS_PAYMENTHEALTH:
            return self.paymenthealth
        if self.subclass == SCORE_SUBCLASS_TRANSFERRESULT:
            return self.transferresult
        if self.subclass == SCORE_SUBCLASS_TRANSACTIONFLAG:
            return self.transactionflag
        if self.subclass == SCORE_SUBCLASS_TRANSACTIONBEACON:
            return self.transactionbeacon
        return self

    def __lt__(self, other):
        return isinstance(other, Transaction) and other.score() > self.score()

    def __gt__(self, other):
        return isinstance(other, Transaction) and other.score() < self.score()

    def __eq__(self, other):
        return isinstance(other, Transaction) and other.score() == self.score()

    # Defining __eq__ sets __hash__ to None in Python 3; keep Model's hash.
    __hash__ = Model.__hash__

    def save(self, *args, **kwargs):
        if self.subclass is None:
            self.subclass = SCORE_SUBCLASS_TRANSACTION
        Model.save(self, *args, **kwargs)
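The docstring's contract (subclasses must define save(), __json__(), __score__() and __string__()) would be met by a concrete child model; multi-table inheritance is what gives Transaction the self.payment, self.transfer, ... accessors that __subclass__() dereferences. A hypothetical sketch, not this project's actual subclass:

class Payment(Transaction):
    # Hypothetical subclass: multi-table inheritance creates the
    # Transaction.payment accessor used by __subclass__().
    def save(self, *args, **kwargs):
        if self.subclass is None:
            self.subclass = SCORE_SUBCLASS_PAYMENT
        Model.save(self, *args, **kwargs)

    def __json__(self):
        data = Transaction.__json__(self)
        data['type'] = 'Payment'
        return data

    def __score__(self):
        return self.value

    def __string__(self):
        return '[Payment] %d: %s -> %s' % (
            self.value, self.source.path(), self.destination.path())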
Example #13
class Job(Model):
    """A job represents a work which has to be done in the 'background' (ie:
    another process than the processes which respond to the clients). They are
    useful for periodic tasks (eg: polling data, like emails, from another server)
    or long tasks (eg: generating a lot of data).

    The type of the job (see creme_core.creme_jobs.base.JobType) determines if
    the job is periodic, pseudo-periodic or not periodic.

    Periodic & pseudo-periodic (see JobType for the difference between them) Jobs
    must be 'system' Job:
        - they are created in 'populate' scripts.
        - they have no user.
        - they can not be deleted, but they can be disabled (see 'enabled' field).
        - periodic Jobs must have their 'periodicity' field filled.
        - pseudo-periodic Jobs should not have their 'periodicity' field filled,
          because it is useless ; the value settings.PSEUDO_PERIOD is used as
          security period instead.

    Not periodic Jobs are user Jobs:
        - they are dynamically created by a view.
        - they must have their 'user' filled; it correspond to the User which
          have created the Job, & who owns it. The Job should act with the
          credentials of this User.
        - A view which creates a Job should check settings.MAX_JOBS_PER_USER
          before creating a Job, and redirect to the jobs list view if the Job
          can not be created (tip: you can use Job.not_finished_jobs()).
        - They have to be deleted once they are finished, in order to create
          other user Jobs.

    The 'reference_run' field is always filled (in an automatic way at least),
    but does not means anything for not periodic Jobs ; in this case it is only
    the creation date, which is not very useful. The 'reference_run' is used to
    compute the time of each execution, which must be something like:
        reference_run + N * periodicity
    """
    STATUS_WAIT = 1
    STATUS_ERROR = 10
    STATUS_OK = 20

    type_id = CharField(_(u'Type of job'), max_length=48, editable=False)
    user = CremeUserForeignKey(verbose_name=_(u'User'),
                               null=True,
                               editable=False)
    enabled = BooleanField(_(u'Enabled'), default=True, editable=False)
    language = CharField(_(u'Language'), max_length=10, editable=False)
    # created      = CreationDateTimeField(_('Creation date'))
    reference_run = DateTimeField(_(u'Reference run'))
    periodicity = DatePeriodField(_(u'Periodicity'), null=True)
    last_run = DateTimeField(_(u'Last run'), null=True, editable=False)
    ack_errors = PositiveIntegerField(
        default=0,
        editable=False)  # Number of errors of communication with the queue.
    status = PositiveSmallIntegerField(
        _(u'Status'),
        editable=False,
        default=STATUS_WAIT,
        choices=(
            (STATUS_WAIT, _(u'Waiting')),
            (STATUS_ERROR, _(u'Error')),
            (STATUS_OK, _(u'Completed successfully')),
        ),
    )
    error = TextField(_(u'Error'), null=True, editable=False)
    raw_data = TextField(
        editable=False
    )  # It stores the Job's parameters  # TODO: use a JSONField ?

    class Meta:
        app_label = 'creme_core'
        verbose_name = _(u'Job')
        verbose_name_plural = _(u'Jobs')
        # ordering = ('created',)
        ordering = ('id', )

    def __init__(self, *args, **kwargs):
        # super(Job, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        if not self.language:
            self.language = get_language()

        self.__init_refreshing_cache()

    def __init_refreshing_cache(self):
        self._old_periodicity = self.periodicity
        self._old_reference_run = self.reference_run
        self._old_enabled = self.enabled

    def __str__(self):
        return str(self.type)

    def __repr__(self):
        return u'<Job type="{type}" id="{id}">'.format(type=self.type_id,
                                                       id=self.id)

    def get_absolute_url(self):
        return reverse('creme_core__job', args=(self.id, ))

    def get_delete_absolute_url(self):
        return reverse('creme_core__delete_job', args=(self.id, ))

    def get_edit_absolute_url(self):
        return reverse('creme_core__edit_job', args=(self.id, ))

    @property
    def data(self):
        return jsonloads(self.raw_data)  # TODO: cache

    @data.setter
    def data(self, value):
        self.raw_data = jsondumps(value)

    @property
    def description(self):  # TODO: cache ?
        try:
            return self.type.get_description(self)
        except Exception:
            logger.exception(
                'Error when building the description of the job id="%s"',
                self.id)

        return ()

    def check_owner(self, user):
        return user.is_superuser or self.user == user

    def check_owner_or_die(self, user):
        if not self.check_owner(user):
            raise PermissionDenied('You are not the owner of this job')

    @property
    def is_finished(self):
        return self.status != self.STATUS_WAIT

    @classmethod
    def not_finished_jobs(cls, user):
        return cls.objects.filter(user=user, status=cls.STATUS_WAIT)

    @property
    def progress(self):
        jtype = self.type

        if jtype is not None:
            return jtype.progress(self)

    @property
    def real_periodicity(self):
        periodicity = self.periodicity

        if periodicity is None and self.user_id is None:
            periodicity = HoursPeriod(value=settings.PSEUDO_PERIOD)

        return periodicity

    def _update_ack_errors(self, incr):
        Job.objects.filter(id=self.id).update(ack_errors=F('ack_errors') + incr)

    def forget_ack_errors(self):
        self._update_ack_errors(-self.ack_errors)

    def get_config_form_class(self):
        "@see JobType.get_config_form_class()"
        jtype = self.type
        return jtype.get_config_form_class(self) if jtype is not None else None

    def refresh(self, force=False):
        """Ask to the JobManager to refresh the job if it's needed, because
        the next runs should be earlier, or disabled.
        @param force: Boolean ; <True> means the message is sent even if no field has changed.
        """
        from ..core.job import JobManagerQueue

        queue_error = False
        enabled = self.enabled
        reference_run = self.reference_run
        periodicity = self.periodicity

        if self._old_enabled != enabled or \
           self._old_reference_run != reference_run or \
           self._old_periodicity != periodicity or \
           force:
            # NB: we sent all the fields values in order to get a more robust system
            #     (even if a REFRESH-message is lost, the next one is complete).
            data = {
                'enabled': enabled,
                'reference_run': dt_to_ISO8601(reference_run),
            }

            if periodicity:
                data['periodicity'] = periodicity.as_dict()

            queue_error = JobManagerQueue.get_main_queue().refresh_job(
                self, data)
            self.__init_refreshing_cache()

        return queue_error

    def update(self, refresh_data, date_period_registry=date_period_registry):
        """Update the fields with information generated by refresh().

        Notice that the instance is not saved.

        @param refresh_data: Dictionary. See data sent on queue by refresh().
        @param date_period_registry: Instance of creme_core.utils.date_period.DatePeriodRegistry.
        @return: True if the instance has changed.
        """
        changed = False
        get = refresh_data.get

        enabled = get('enabled')
        if enabled is not None:
            if self.enabled != enabled:
                self.enabled = enabled
                changed = True

        ref_run_str = get('reference_run')
        if ref_run_str is not None:
            ref_run = dt_from_ISO8601(ref_run_str)

            if self.reference_run != ref_run:
                self.reference_run = ref_run
                changed = True

        periodicity_dict = get('periodicity')
        if periodicity_dict is not None:
            periodicity = date_period_registry.deserialize(periodicity_dict)

            if self.periodicity != periodicity:
                self.periodicity = periodicity
                changed = True

        return changed

    @atomic
    def save(self, *args, **kwargs):
        from ..core.job import JobManagerQueue

        created = self.pk is None

        if created and self.reference_run is None:
            self.reference_run = now()

            if self.user_id is None:  # System job
                self.reference_run = round_hour(self.reference_run)

        # super(Job, self).save(*args, **kwargs)
        super().save(*args, **kwargs)

        queue_error = False

        if created:
            if self.user_id is not None:
                queue_error = JobManagerQueue.get_main_queue().start_job(self)
        elif self.user_id is None:  # System job
            queue_error = self.refresh()

        if queue_error:
            self._update_ack_errors(1)

    @property
    def stats(self):
        jtype = self.type
        return jtype.get_stats(self) if jtype is not None else []

    @property
    def type(self):
        from ..core.job import job_type_registry
        return job_type_registry.get(self.type_id)

    @type.setter
    def type(self, value):
        # TODO: check that it is in job_type_registry ?
        self.type_id = value.id
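As a worked example of the docstring's scheduling rule (execution times are reference_run + N * periodicity; the values below are made up):

from datetime import datetime, timedelta

reference_run = datetime(2024, 1, 1, 8, 0)   # hypothetical reference run
periodicity = timedelta(hours=6)
current = datetime(2024, 1, 3, 11, 30)

n = (current - reference_run) // periodicity + 1   # first N after 'current'
next_run = reference_run + n * periodicity
print(next_run)  # 2024-01-03 14:00:00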
Example #14
class Case(Model):
    """案件
    * state: 案件狀態, 預設值為未成案
    * uuid: 案件編號(uuid4)
    * type: 案件類別
    * region: 使用者所在選區
    * title: 標題
    * content: 案件內容
    * location: 相關地址
    * username: 使用者名字
    * mobile: 手機
    * email: 信箱
    * address: 地址
    * open_time: 成案日期
    * close_time: 結案日期
    * update_time: 上次更新時間
    """
    state = FSMField(default=State.DRAFT,
                     verbose_name=_('Case State'),
                     choices=State.CHOICES)
    uuid = UUIDField(default=uuid.uuid4, verbose_name=_('UUID'), unique=True)
    number = CharField(max_length=6,
                       default='-',
                       null=True,
                       blank=True,
                       verbose_name=_('Case Number'))
    type = ForeignKey('cases.Type',
                      on_delete=CASCADE,
                      related_name='cases',
                      verbose_name=_('Case Type'))
    region = ForeignKey('cases.Region',
                        on_delete=CASCADE,
                        related_name='cases',
                        verbose_name=_('User Region'))
    title = CharField(max_length=255, verbose_name=_('Case Title'))
    content = TextField(verbose_name=_('Content'))
    location = CharField(null=True,
                         blank=True,
                         max_length=255,
                         verbose_name=_('Location'))
    username = CharField(max_length=50, verbose_name=_('Username'))
    mobile = CharField(max_length=10,
                       null=True,
                       blank=True,
                       verbose_name=_('Mobile'))
    email = EmailField(null=True, blank=True, verbose_name=_('Email'))
    address = CharField(null=True,
                        blank=True,
                        max_length=255,
                        verbose_name=_('Address'))
    open_time = DateTimeField(null=True,
                              blank=True,
                              verbose_name=_('Opened Time'))
    close_time = DateTimeField(null=True,
                               blank=True,
                               verbose_name=_('Closed Time'))
    create_time = DateTimeField(auto_now_add=True,
                                null=True,
                                blank=True,
                                verbose_name=_('Created Time'))
    update_time = DateTimeField(auto_now=True,
                                null=True,
                                blank=True,
                                verbose_name=_('Updated Time'))

    disapprove_info = TextField(null=True,
                                blank=True,
                                verbose_name=_('Disapprove Info'))

    note = TextField(null=True, blank=True, verbose_name=_('Case Notes'))
    tags = TagField(blank=True, verbose_name=_('Case Tags'))

    objects = CaseQuerySet.as_manager()

    class Meta:
        verbose_name = _('Case')
        verbose_name_plural = _('Cases')
        ordering = ('id', )

    def save(self, *args, **kwargs):
        created = self.pk is None
        super(Case, self).save(*args, **kwargs)
        if created:
            self.number = str(self.pk).zfill(6)
            self.save()  # second save to persist the zero-padded case number
            self.confirm(template_name='收件通知')  # "receipt notification" template
            self.move_file()

    def __str__(self):
        return self.number

    def to_dict(self):
        """用於新增CaseHistory"""
        return {
            'state': self.state,
            'title': self.title,
            'type': self.type,
            'region': self.region,
            'content': self.content,
            'location': self.location,
            'username': self.username,
            'mobile': self.mobile,
            'email': self.email,
            'address': self.address,
        }

    def move_file(self):
        case = Case.objects.get(uuid=self.uuid)
        objs = TempFile.objects.filter(case_uuid=self.uuid)
        for i in objs:
            file = TEMP_STORAGE.open(i.file.name)
            case_file = CaseFile()
            case_file.case = case
            case_file.file = file
            case_file.save()
Example #15
class Unit(Model):
    name = CharField(_('name'), max_length=64, unique=True, db_index=True)
    # who has the right to see detailed data for this unit?
    owner = ForeignKey(settings.AUTH_USER_MODEL,
                       verbose_name=_('owner'),
                       related_name='owns_unit',
                       null=True,
                       blank=True)
    # which users belong to this unit?
    members = ManyToManyField(settings.AUTH_USER_MODEL,
                              verbose_name=_('members'),
                              related_name='units',
                              blank=True,
                              db_index=True)
    # which qualifications are relevant for this unit?
    qualifications = ManyToManyField(Qualification,
                                     verbose_name=_('qualifications'),
                                     related_name='units',
                                     blank=True,
                                     db_index=True)
    date_created = DateTimeField(_('created (date)'),
                                 null=False,
                                 db_index=True,
                                 auto_now_add=True)
    date_modified = DateTimeField(_('modified (date)'),
                                  null=False,
                                  db_index=True,
                                  auto_now=True)

    objects = UnitManager()

    def __unicode__(self):
        return self.name

    def per_qualification_statistics(self):
        """Qualification statistics on a unit level.

        Returns a list of (qualification, members_meeting_it, total_members)
        tuples, one per qualification attached to this unit.
        """
        # TODO Should be limited to the qualifications belonging to
        # TODO the unit or not?
        stats = []
        n_members = self.members.count()

        for q in self.qualifications.all():
            criteria_ids = q.criteria.values_list('id', flat=True)
            n_users_met_qualification = self.members \
                .filter(met_criteria__id__in=criteria_ids) \
                .distinct() \
                .count()
            stats.append((q, n_users_met_qualification, n_members))
        return stats

    def per_user_statistics(self):
        # TODO Should be limited to the qualifications belonging to
        # TODO the unit?
        pass

    class Meta:
        verbose_name = _('unit')
        verbose_name_plural = _('units')
Example #16

class ExtractSecond(Extract):
    lookup_name = 'second'


DateField.register_lookup(ExtractYear)
DateField.register_lookup(ExtractMonth)
DateField.register_lookup(ExtractDay)
DateField.register_lookup(ExtractWeekDay)

TimeField.register_lookup(ExtractHour)
TimeField.register_lookup(ExtractMinute)
TimeField.register_lookup(ExtractSecond)

DateTimeField.register_lookup(ExtractYear)
DateTimeField.register_lookup(ExtractMonth)
DateTimeField.register_lookup(ExtractDay)
DateTimeField.register_lookup(ExtractWeekDay)
DateTimeField.register_lookup(ExtractHour)
DateTimeField.register_lookup(ExtractMinute)
DateTimeField.register_lookup(ExtractSecond)

ExtractYear.register_lookup(YearExact)
ExtractYear.register_lookup(YearGt)
ExtractYear.register_lookup(YearGte)
ExtractYear.register_lookup(YearLt)
ExtractYear.register_lookup(YearLte)
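
# Hedged usage sketch (hypothetical model `Article` with a DateTimeField
# `created`): once the transforms above are registered, calendar parts become
# chainable lookups, and the Year* lookups enable range comparisons.
recent = Article.objects.filter(created__year__gte=2020)
evenings = Article.objects.filter(created__hour__gte=18)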


class TruncBase(TimezoneMixin, Transform):
Example #17
class Journal(models.Model):
    key_out_date = DateTimeField(null=True)
    key_in_date = DateTimeField(auto_now_add=True)
    tenant = ForeignKey(Tenant, on_delete=DO_NOTHING)
    key = ForeignKey(Key, on_delete=DO_NOTHING)
Example #18
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('blank', True)
        kwargs.setdefault('auto_now_add', True)
        DateTimeField.__init__(self, *args, **kwargs)
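# A hedged sketch of how the __init__ above is presumably used: it reads like
# the constructor of a DateTimeField subclass acting as a creation timestamp.
# The class and model names below are hypothetical.
class CreatedAtField(DateTimeField):
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('editable', False)      # hide from forms/admin
        kwargs.setdefault('blank', True)          # pass validation without input
        kwargs.setdefault('auto_now_add', True)   # set once on insert
        super().__init__(*args, **kwargs)


class Ticket(Model):
    created_at = CreatedAtField()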
Example #19
class Showable(Model):
    name = CharField(default='!%', max_length=10)
    description = CharField(default='', max_length=140)
    image = ImageField()
    user = ForeignKey(User, related_name='+', on_delete=CASCADE)
    date = DateTimeField(auto_now_add=True)
Example #20
    def migrate(self):
        if not self.db.has_column("auth_user", "last_login"):
            self.db.add_column(
                "auth_user", "last_login",
                DateTimeField("Last Login", blank=True, null=True))
Example #21
class UploadedImages(Model):

    # Fix pluralization in admin panel
    class Meta:
        verbose_name_plural = "Uploaded Images"

    # Define the image categories displayed in ~/templates/our-work.html
    CATEGORIES = (
        ('No_Category', 'Select a Category'),
        ('House_Wash', 'House Wash'),
        ('Wood_Restoring', 'Wood Restoring'),
        ('Oxidation_Removal', 'Oxidation Removal'),
        ('Stain_Removal', 'Stain Removal'),
    )

    DEGREES = (
        (0, '0 degrees'),
        (270, '90 degrees (90 degrees clockwise)'),
        (180, '180 degrees (upside-down)'),
        (90, '270 degrees (90 degrees counter-clockwise)'),
    )

    # Define the user image input fields in the Django admin panel
    Category = CharField(max_length=64,
                         null=True,
                         choices=CATEGORIES,
                         default='No_Category')
    Before_Picture_Description = CharField(max_length=64,
                                           null=True,
                                           blank=True)
    Before_Picture_Size_kB = IntegerField(null=True, default=140)
    Before_Picture_Max_Dimension = IntegerField(null=True, default=768)
    Before_Picture_Rotation = IntegerField(null=True,
                                           choices=DEGREES,
                                           default=0)
    Before_Picture = ImageField(upload_to='images/', null=True)
    After_Picture_Description = CharField(max_length=64, null=True, blank=True)
    After_Picture_Size_kB = IntegerField(null=True, default=140)
    After_Picture_Max_Dimension = IntegerField(null=True, default=768)
    After_Picture_Rotation = IntegerField(null=True,
                                          choices=DEGREES,
                                          default=0)
    After_Picture = ImageField(upload_to='images/', null=True)
    date = DateTimeField(auto_now_add=True, null=True)
    Notes = TextField(max_length=200, null=True, blank=True)

    # Extend the default *.save() behavior: resize/rotate the uploaded
    # pictures before delegating to super().save()
    def save(self, *args, **kwargs):
        if self.Before_Picture:
            # Note: this will overwrite the image uploaded by the user
            self.Before_Picture = self.resize_image(
                self.Before_Picture, self.Before_Picture_Size_kB,
                self.Before_Picture_Max_Dimension,
                self.Before_Picture_Rotation)
        if self.After_Picture:
            self.After_Picture = self.resize_image(
                self.After_Picture, self.After_Picture_Size_kB,
                self.After_Picture_Max_Dimension, self.After_Picture_Rotation)
        super(UploadedImages, self).save(*args, **kwargs)

    # Resize user-uploaded images
    # https://stackoverflow.com/questions/3723220/how-do-you-convert-a-pil-image-to-a-django-file
    def resize_image(self, picture, size_target, max_dim, rotation):

        # Set variables for the *.binary_search() method
        size_target = size_target * 1000  # Ideal image size (in bytes)
        dimensions = [(max_dim, max_dim)]  # Dimensions for *.thumbnail()
        dimension_factor = 1  # For generating 1x, 2x (retina), or higher res.
        i = 1  # Iteration starting point
        max_i = 7  # Max number of iterations
        quality = 50  # Starting quality value
        L = 1  # Left pointer
        R = 100  # Right pointer

        # Run the binary search algorithm once for each set of dimensions you want
        # to create images at, i.e. 320, 576, 768, etc. Currently the front-end has
        # no support for more than one set of dimensions, but I'm keeping the FOR
        # loop here anyway so I know where to start if I later implement multiple
        # dimensions to support responsive images.
        for dimension in dimensions:
            im_buffer = self.binary_search(picture, size_target, dimension,
                                           dimension_factor, rotation, i,
                                           max_i, quality, L, R)

        # When files are uploaded in Django they are stored in a dictionary called
        # request.FILES as "UploadedFile" objects (or a subclass like
        # InMemoryUploadedFile). We can try to grab the BytesIO object and convert it
        # back into a File object (or "Django" File object) while the BytesIO object
        # is in memory, ie. while it exists within this function.
        #
        # picture.name: *.name is a Django File object attribute that includes the
        # name of the file plus its relative path from MEDIA_ROOT
        #
        # Syntax:
        # InMemoryUploadedFile(file, field_name, name, content_type, size, charset)
        if im_buffer is not None:
            im_resized_file = InMemoryUploadedFile(
                im_buffer, None, picture.name, 'image/jpeg',
                im_buffer.getbuffer().nbytes, None)
            return im_resized_file
        else:
            print("{} was not altered".format(picture))
            return picture

    # Binary search algorithm that uses 3 pointers -- L, R, and quality, where the
    # value for quality is used by PIL's *.save() method to set the quality of an
    # image -- in an attempt to find a quality that produces a file size as close
    # to size_target as max_i iterations will allow (close, but not perfect; the
    # results could probably be memoized).
    def binary_search(self,
                      picture,
                      size_target,
                      dimension,
                      dimension_factor,
                      rotation,
                      i,
                      max_i,
                      quality,
                      L,
                      R,
                      im_buffer=None):

        # It's possible that the picture file size is already less than the target
        # file size, but we can still rotate the image here.
        if picture.size < size_target:
            print("{} is already less than {} bytes".format(
                picture, size_target))
            im = Image.open(picture)
            if rotation == 90:
                im = im.transpose(Image.ROTATE_90)
            elif rotation == 180:
                im = im.transpose(Image.ROTATE_180)
            elif rotation == 270:
                im = im.transpose(Image.ROTATE_270)
            im_buffer = BytesIO()
            im.save(im_buffer, "JPEG", quality=quality)
            return im_buffer

        # If the maximum number of iterations have been reached, return
        if i > max_i:
            print("Max iterations have been reached for {}".format(picture))
            return im_buffer

        # Open the image file, alter its dimensions, and save it as a new BytesIO file
        # named 'im_buffer'.
        if quality <= 95:
            im = Image.open(picture)
            if rotation == 90:
                im = im.transpose(Image.ROTATE_90)
            elif rotation == 180:
                im = im.transpose(Image.ROTATE_180)
            elif rotation == 270:
                im = im.transpose(Image.ROTATE_270)
            new_dimension = (dimension[0] * dimension_factor,
                             dimension[1] * dimension_factor)
            im.thumbnail(new_dimension, Image.ANTIALIAS)
            # new_prefix = '{}x-'.format(dimension_factor)
            # new_name = new_prefix + name + '-' + str(dimension[0]) + '.jpg'
            im_buffer = BytesIO()
            im.save(im_buffer, "JPEG", quality=quality)

            # Use L and R pointers to move closer to a value for the 'quality' parameter
            # that produces an image with a file size, in bytes, as close to size_target
            # as possible using a binary search-type of algorithm.
            if im_buffer.getbuffer().nbytes < size_target:
                print(
                    'Resulting image size is LESS    than {} bytes:'.format(
                        size_target),
                    im_buffer.getbuffer().nbytes, 'bytes, quality =', quality)
                L = quality
                quality = int((R + L) / 2)
                return self.binary_search(picture, size_target, dimension,
                                          dimension_factor, rotation, i + 1,
                                          max_i, quality, L, R, im_buffer)
            elif im_buffer.getbuffer().nbytes > size_target:
                print(
                    'Resulting image size is GREATER than {} bytes:'.format(
                        size_target),
                    im_buffer.getbuffer().nbytes, 'bytes, quality =', quality)
                R = quality
                quality = int((R + L) / 2)
                return self.binary_search(picture, size_target, dimension,
                                          dimension_factor, rotation, i + 1,
                                          max_i, quality, L, R, im_buffer)
            else:
                print(
                    'Resulting image size EQUALS {} bytes:'.format(
                        size_target),
                    im_buffer.getbuffer().nbytes, 'bytes, quality =', quality)
                return im_buffer
        else:
            return im_buffer
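
    # Illustrative trace of the search above (hedged; the values are made up):
    # with size_target=140000 bytes, L=1, R=100 and quality=50, an encode that
    # lands above the target moves R to 50 and retries at quality=int(51/2)=25;
    # one below the target moves L to 50 and retries at quality=int(150/2)=75.
    # After max_i=7 iterations the window is at most a few quality steps wide.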
Example #22
class lastSeen(Model):
    user = ForeignKey(user_details, on_delete=CASCADE, verbose_name='user',
                      null=True, blank=True, db_index=True)
    lastSeen = DateTimeField(editable=False, db_index=True)
    activeDevices = IntegerField(default=0)

    def __str__(self):
        return "{}, Active Devices- {}".format(self.user.username,
                                               self.activeDevices)

    def _isActive(self):
        return self.activeDevices >= 1

    def notify_ws_clients(self):
        """
        Inform client there is a message is liked.
        """
        if self.activeDevices < 1:
            notification = {
                'type': 'lastSeenUpdate',
                'chat': self.user.username,
                'activeNow': False,
                'lastSeen': str(self.lastSeen),
            }

        elif self.activeDevices == 1:
            notification = {
                'type': 'lastSeenUpdate',
                'chat': self.user.username,
                'activeNow': True,
                'lastSeen': str(self.lastSeen),
            }
        channel_layer = get_channel_layer()
        latest_users = list(set(
            MessageModel.objects.filter(user=self.user)
            .values_list('recipient', flat=True).order_by('pk')))
        for chat_user_pk in latest_users:
            try:
                chat_user = user_details.objects.get(pk=chat_user_pk)
                print("Notifying -", chat_user.username, end=' ')
                async_to_sync(channel_layer.group_send)(
                    "{}".format(chat_user.username), notification)

            except Exception as e:
                print(str(e))
                print("Failed to notify a recent chat over the websocket")

    def save(self, *args, **kwargs):
        """
        Trims white spaces, saves the message and notifies the recipient via WS
        if the message is new.
        """
        #print("Last Seen Updated")
        activeDevices = self.activeDevices
        from datetime import datetime
        self.lastSeen = datetime.now()
        super(lastSeen, self).save(*args, **kwargs)
        #print("last seen updated")
        #if new != None: #send websoked even if message is disliked. 
        #print(activeDevices)
        if activeDevices <= 1:
            self.notify_ws_clients()
Example #23
class ChatMessageModel(Model):
    """
    This class represents a chat message. It has an owner (user), a timestamp,
    and the message body.

    """
    user = ForeignKey(get_user_model(),
                      on_delete=CASCADE,
                      verbose_name='user',
                      related_name='from_user',
                      db_index=True)
    recipient = ForeignKey(get_user_model(),
                           on_delete=CASCADE,
                           verbose_name='recipient',
                           related_name='to_user',
                           db_index=True,
                           null=True,
                           blank=True)
    created = DateTimeField(auto_now_add=True, editable=False, db_index=True)
    read_date = DateTimeField(editable=False, null=True, blank=True)
    room = CharField(max_length=150, null=True, blank=True)
    body = TextField('body')
    broadcast = BooleanField(default=False)

    def __str__(self):
        return str(self.id)

    def characters(self):
        """
        Toy function to count body characters.
        :return: body's char number
        """
        return len(self.body)

    def notify_single_client(self, sender, recipient):
        """
        Inform client there is a new message.
        """
        channel_layer = get_channel_layer()
        sender_channel = UserChannel.objects.filter(user__pk=sender.pk,
                                                    room=self.room).first()
        recipient_channel = UserChannel.objects.filter(user__pk=recipient.pk,
                                                       room=self.room).first()

        notification = {
            'type': 'receive',
            'message': self.id,
            'user_fullname': '{} {}'.format(self.user.first_name,
                                            self.user.last_name),
            'is_operator': chat_operator(self.user, self.room),
            'operator_status': sender_channel.status if sender_channel else True,
        }

        # print(notification)

        if sender_channel and sender_channel.channel:
            async_to_sync(channel_layer.send)(sender_channel.channel,
                                              notification)

        if recipient_channel and recipient_channel.channel:
            async_to_sync(channel_layer.send)(recipient_channel.channel,
                                              notification)

    def notify_ws_clients(self):
        """
        Inform client there is a new message.
        """
        channel_layer = get_channel_layer()
        sender_channel = UserChannel.objects.filter(user=self.user,
                                                    room=self.room).first()
        notification = {
            'type': 'receive_group_message',
            'message': '{}'.format(self.id),
            'user_fullname': '{} {}'.format(self.user.first_name,
                                            self.user.last_name),
            'is_operator': chat_operator(self.user, self.room),
            'operator_status': sender_channel.status if sender_channel else True,
        }
        async_to_sync(channel_layer.group_send)(self.room, notification)

    def save(self, *args, **kwargs):
        """
        Trims white spaces, saves the message and notifies the recipient via WS
        if the message is new.
        """
        # broadcast only for staff users
        if self.broadcast and not chat_operator(self.user, self.room):
            return False
        new = self.id
        self.body = self.body.strip()  # Trimming whitespaces from the body
        # Escape text to avoid XSS attacks and render hrefs
        self.body = get_text_with_hrefs(strip_tags(self.body))
        super(ChatMessageModel, self).save(*args, **kwargs)
        channel = UserChannel.objects.filter(user=self.user,
                                             room=self.room).first()
        if channel:
            channel.save(update_fields=['last_seen'])
        if not new:
            if self.broadcast:
                self.notify_ws_clients()
            else:
                # notify sender and recipient
                self.notify_single_client(sender=self.user,
                                          recipient=self.recipient)
                # notify sender
                # self.notify_single_client(recipient=self.user)

    # Meta
    class Meta:
        app_label = 'chat'
        verbose_name = 'message'
        verbose_name_plural = 'messages'
        ordering = ('-created', )
Example #24
    def basequeryset(reportclass, request, *args, **kwargs):
        q = PurchaseOrder.objects.all()
        if args and args[0]:
            paths = request.path.split("/")
            path = paths[4]
            if paths[4] == "operationplanmaterial":
                q = q.filter(
                    location__name=args[1],
                    item__name=args[0],
                    startdate__lt=args[2],
                    enddate__gte=args[2],
                )
            elif path == "produced":
                q = q.filter(
                    location__name=args[1],
                    item__name=args[0],
                    enddate__gte=args[2],
                    enddate__lt=args[3],
                )
            elif path == "supplier" or request.path.startswith(
                    "/detail/input/supplier/"):
                try:
                    Supplier.rebuildHierarchy(database=request.database)
                    sup = (Supplier.objects.all().using(
                        request.database).get(name=args[0]))
                    lft = sup.lft
                    rght = sup.rght
                except Supplier.DoesNotExist:
                    lft = 1
                    rght = 1
                q = q.filter(supplier__lft__gte=lft, supplier__rght__lte=rght)
            elif path == "location" or request.path.startswith(
                    "/detail/input/location/"):
                try:
                    Location.rebuildHierarchy(database=request.database)
                    loc = (Location.objects.all().using(
                        request.database).get(name=args[0]))
                    lft = loc.lft
                    rght = loc.rght
                except Location.DoesNotExist:
                    lft = 1
                    rght = 1
                q = q.filter(location__lft__gte=lft, location__rght__lte=rght)
            elif path == "item" or request.path.startswith(
                    "/detail/input/item/"):
                try:
                    Item.rebuildHierarchy(database=request.database)
                    itm = Item.objects.all().using(
                        request.database).get(name=args[0])
                    lft = itm.lft
                    rght = itm.rght
                except Item.DoesNotExist:
                    lft = 1
                    rght = 1
                q = q.filter(item__lft__gte=lft, item__rght__lte=rght)

        q = reportclass.operationplanExtraBasequery(q.select_related("item"),
                                                    request)
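        # Each annotation below resolves the "effective" itemsupplier row for
        # the purchase order with a correlated scalar subquery: match on
        # item/location/supplier, prefer rows whose effective date range covers
        # the operationplan end date, then take the lowest non-zero priority.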
        return q.annotate(
            unit_cost=Cast(
                RawSQL(
                    """
                    coalesce((
                      select cost
                      from itemsupplier
                      where itemsupplier.item_id = operationplan.item_id
                        and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                        and itemsupplier.supplier_id = operationplan.supplier_id
                      order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                         operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                         priority <> 0,
                         priority
                      limit 1),
                     (select cost from item where item.name = operationplan.item_id), 0)
                    """,
                    [],
                ),
                output_field=FloatField(),
            ),
            total_cost=Cast(F("unit_cost") * F("quantity"),
                            output_field=FloatField()),
            total_volume=Cast(F("item__volume") * F("quantity"),
                              output_field=FloatField()),
            total_weight=Cast(F("item__weight") * F("quantity"),
                              output_field=FloatField()),
            feasible=RawSQL(
                "coalesce((operationplan.plan->>'feasible')::boolean, true)",
                []),
            computed_color=RawSQL(
                """
                case when operationplan.color >= 999999 and operationplan.plan ? 'item' then
                999999
                - extract(epoch from operationplan.delay)/86400.0
                + 1000000
                when operationplan.color >= 999999 and not(operationplan.plan ? 'item') then
                999999
                - extract(epoch from operationplan.delay)/86400.0
                else operationplan.color
                end
                """,
                [],
            ),
            itemsupplier_sizeminimum=Cast(
                RawSQL(
                    """
                    select sizeminimum
                    from itemsupplier
                    where itemsupplier.item_id = operationplan.item_id
                      and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                      and itemsupplier.supplier_id = operationplan.supplier_id
                    order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                       operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                       priority <> 0,
                       priority
                    limit 1
                    """,
                    [],
                ),
                output_field=FloatField(),
            ),
            itemsupplier_sizemultiple=Cast(
                RawSQL(
                    """
                    select sizemultiple
                    from itemsupplier
                    where itemsupplier.item_id = operationplan.item_id
                      and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                      and itemsupplier.supplier_id = operationplan.supplier_id
                    order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                       operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                       priority <> 0,
                       priority
                    limit 1
                    """,
                    [],
                ),
                output_field=FloatField(),
            ),
            itemsupplier_sizemaximum=Cast(
                RawSQL(
                    """
                    select sizemaximum
                    from itemsupplier
                    where itemsupplier.item_id = operationplan.item_id
                      and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                      and itemsupplier.supplier_id = operationplan.supplier_id
                    order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                       operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                       priority <> 0,
                       priority
                    limit 1
                    """,
                    [],
                ),
                output_field=FloatField(),
            ),
            itemsupplier_priority=Cast(
                RawSQL(
                    """
                    select priority
                    from itemsupplier
                    where itemsupplier.item_id = operationplan.item_id
                      and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                      and itemsupplier.supplier_id = operationplan.supplier_id
                    order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                       operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                       priority <> 0,
                       priority
                    limit 1
                    """,
                    [],
                ),
                output_field=FloatField(),
            ),
            itemsupplier_effective_start=Cast(
                RawSQL(
                    """
                    select effective_start
                    from itemsupplier
                    where itemsupplier.item_id = operationplan.item_id
                      and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                      and itemsupplier.supplier_id = operationplan.supplier_id
                    order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                       operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                       priority <> 0,
                       priority
                    limit 1
                    """,
                    [],
                ),
                output_field=DateTimeField(),
            ),
            itemsupplier_effective_end=Cast(
                RawSQL(
                    """
                    select effective_end
                    from itemsupplier
                    where itemsupplier.item_id = operationplan.item_id
                      and (itemsupplier.location_id is null or itemsupplier.location_id = operationplan.location_id)
                      and itemsupplier.supplier_id = operationplan.supplier_id
                    order by operationplan.enddate < itemsupplier.effective_end desc nulls first,
                       operationplan.enddate >= itemsupplier.effective_start desc nulls first,
                       priority <> 0,
                       priority
                    limit 1
                    """,
                    [],
                ),
                output_field=DateTimeField(),
            ),
        )
Example #25
class Schedule(TimeStampedModel):
    STATUS_CHOICES = (
        (0, 'Pendente'),
        (1, 'Confirmado'),
        (2, 'Faltou'),
        (3, 'Cancelou'),
    )
    NOTIFICATION_STATUS_CHOICES = (
        (0, 'Pending'),
        (1, 'Success'),
        (2, 'Error'),
        (3, 'Expired'),
        (4, 'Unknown'),
    )

    class Meta:
        verbose_name = 'Agendamento'
        verbose_name_plural = 'Agendamentos'

    # Model Fields
    patient = ForeignKey(Patient, on_delete=CASCADE)
    dentist = ForeignKey(Dentist, on_delete=CASCADE)
    date = DateTimeField('Data')
    duration = IntegerField('Duração')
    notification_status = IntegerField('Status da notificação', default=0)
    notification_task_id = CharField('ID Notificação',
                                     max_length=50,
                                     default=None,
                                     null=True)
    status = IntegerField('Status do agendamento',
                          choices=STATUS_CHOICES,
                          default=0)

    def get_message(self) -> str:
        local_date = self.date.astimezone(settings.TZ)

        now = datetime.now(tz=settings.TZ).date()

        if (local_date.date() - now).days == 0:
            schedule_date = 'hoje'
        elif (local_date.date() - now).days == 1:
            schedule_date = 'amanhã'
        else:
            schedule_date = local_date.strftime("dia %d/%m")

        message = "Olá {patient_prefix} {patient_name}, " \
                  "não se esqueça de sua consulta odontológica " \
                  "{schedule_date} às {schedule_time}.".format(
                    patient_prefix=self.patient.get_sex_prefix(),
                    patient_name=self.patient.name,
                    dentist_prefix=self.dentist.get_sex_prefix(),
                    dentist_name=self.dentist.first_name,
                    schedule_date=schedule_date,
                    schedule_time=local_date.strftime("%H:%M"))

        return message

    def delete(self, using=None, keep_parents=False):
        self.revoke_notification()
        return super().delete(using, keep_parents)

    def revoke_notification(self):
        celery_app.control.revoke(self.notification_task_id)

    def create_notification(self):
        if date.today() > self.date.date():
            self.notification_status = self.NOTIFICATION_STATUS_CHOICES[3][0]
        else:
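            # Hedged reading of the scheduling below: the reminder is queued
            # (Celery eta) for the day before the appointment at the time given
            # by settings.MESSAGE_ETA and expires at settings.MESSAGE_EXPIRES,
            # so stale reminders are never delivered.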
            start_time = settings.MESSAGE_ETA
            end_time = settings.MESSAGE_EXPIRES
            msg_datetime = self.date.astimezone(settings.TZ).replace(
                **start_time) - timedelta(days=1)
            msg_expires = msg_datetime.replace(**end_time)
            message = send_message.apply_async((self.id, ),
                                               eta=msg_datetime,
                                               expires=msg_expires)
            if self.notification_task_id:
                self.revoke_notification()
            self.notification_task_id = message.id
            self.notification_status = self.NOTIFICATION_STATUS_CHOICES[0][0]
Example #26
class ScriptResult(CleanSave, TimestampedModel):

    # Force model into the metadataserver namespace.
    class Meta(DefaultMeta):
        pass

    script_set = ForeignKey(ScriptSet, editable=False, on_delete=CASCADE)

    # All ScriptResults except commissioning scripts will be linked to a Script
    # as commissioning scripts are still embedded in the MAAS source.
    script = ForeignKey(Script,
                        editable=False,
                        blank=True,
                        null=True,
                        on_delete=CASCADE)

    # Any parameters set by MAAS or the user which should be passed to the
    # running script.
    parameters = JSONObjectField(blank=True, default={})

    # If the result is in reference to a particular block device link it.
    physical_blockdevice = ForeignKey(
        PhysicalBlockDevice,
        editable=False,
        blank=True,
        null=True,
        on_delete=CASCADE,
    )

    # If the result is in reference to a particular Interface link it.
    interface = ForeignKey(Interface,
                           editable=False,
                           blank=True,
                           null=True,
                           on_delete=CASCADE)

    script_version = ForeignKey(
        VersionedTextFile,
        blank=True,
        null=True,
        editable=False,
        on_delete=SET_NULL,
    )

    status = IntegerField(choices=SCRIPT_STATUS_CHOICES,
                          default=SCRIPT_STATUS.PENDING)

    exit_status = IntegerField(blank=True, null=True)

    # Used by the builtin commissioning scripts and installation result. Also
    # stores the Script name in case the Script is deleted but the result isn't.
    script_name = CharField(max_length=255,
                            unique=False,
                            editable=False,
                            null=True)

    output = BinaryField(max_length=1024 * 1024, blank=True, default=b"")

    stdout = BinaryField(max_length=1024 * 1024, blank=True, default=b"")

    stderr = BinaryField(max_length=1024 * 1024, blank=True, default=b"")

    result = BinaryField(max_length=1024 * 1024, blank=True, default=b"")

    # When the script started to run
    started = DateTimeField(editable=False, null=True, blank=True)

    # When the script finished running
    ended = DateTimeField(editable=False, null=True, blank=True)

    # Whether or not the failed script result should be suppressed.
    suppressed = BooleanField(default=False)

    @property
    def name(self):
        if self.script is not None:
            return self.script.name
        elif self.script_name is not None:
            return self.script_name
        else:
            return "Unknown"

    @property
    def status_name(self):
        return SCRIPT_STATUS_CHOICES[self.status][1]

    @property
    def runtime(self):
        if None not in (self.ended, self.started):
            runtime = self.ended - self.started
            return str(runtime - timedelta(microseconds=runtime.microseconds))
        else:
            return ""

    @property
    def starttime(self):
        if self.started is not None:
            return self.started.timestamp()
        else:
            return ""

    @property
    def endtime(self):
        if self.ended is not None:
            return self.ended.timestamp()
        else:
            return ""

    @property
    def estimated_runtime(self):
        # If there is a runtime the script has completed, no need to calculate
        # an estimate.
        if self.runtime != "":
            return self.runtime
        runtime = None
        # Get an estimated runtime from previous runs.
        for script_result in self.history.only(
                "status",
                "started",
                "ended",
                "script_id",
                "script_name",
                "script_set_id",
                "physical_blockdevice_id",
                "created",
        ):
            # Only look at passed results when calculating an estimated
            # runtime. Failed results may take longer or shorter than
            # average. Don't use self.history.filter for this as the now
            # cached history list may be used elsewhere.
            if script_result.status != SCRIPT_STATUS.PASSED:
                continue
            # LP: #1730799 - Old results may not have started set.
            if script_result.started is None:
                script_result.started = script_result.ended
                script_result.save(update_fields=["started"])
            previous_runtime = script_result.ended - script_result.started
            if runtime is None:
                runtime = previous_runtime
            else:
                runtime += previous_runtime
                runtime = runtime / 2
        if runtime is None:
            if self.script is not None and self.script.timeout != timedelta(0):
                # If there were no previous runs use the script's timeout.
                return str(self.script.timeout - timedelta(
                    microseconds=self.script.timeout.microseconds))
            else:
                return "Unknown"
        else:
            return str(runtime - timedelta(microseconds=runtime.microseconds))

    def __str__(self):
        return "%s/%s" % (self.script_set.node.system_id, self.name)

    def read_results(self):
        """Read the results YAML file and validate it."""
        try:
            parsed_yaml = yaml.safe_load(self.result)
        except yaml.YAMLError as err:
            raise ValidationError(err)

        if parsed_yaml is None:
            # No results were given.
            return {}
        elif not isinstance(parsed_yaml, dict):
            raise ValidationError("YAML must be a dictionary.")

        if parsed_yaml.get("status") not in [
                "passed",
                "failed",
                "degraded",
                "timedout",
                "skipped",
                None,
        ]:
            raise ValidationError(
                'status must be "passed", "failed", "degraded", '
                '"timedout", or "skipped".')

        link_connected = parsed_yaml.get("link_connected")
        if link_connected is not None:
            if not self.interface:
                raise ValidationError(
                    "link_connected may only be specified if the Script "
                    "accepts an interface parameter.")
            if not isinstance(link_connected, bool):
                raise ValidationError("link_connected must be a boolean")

        results = parsed_yaml.get("results")
        if results is None:
            # Results are not defined.
            return parsed_yaml
        elif isinstance(results, dict):
            for key, value in results.items():
                if not isinstance(key, str):
                    raise ValidationError(
                        "All keys in the results dictionary must be strings.")

                if not isinstance(value, list):
                    value = [value]
                for i in value:
                    if type(i) not in [str, float, int, bool]:
                        raise ValidationError(
                            "All values in the results dictionary must be "
                            "a string, float, int, or bool.")
        else:
            raise ValidationError("results must be a dictionary.")

        return parsed_yaml

    def store_result(
        self,
        exit_status=None,
        output=None,
        stdout=None,
        stderr=None,
        result=None,
        script_version_id=None,
        timedout=False,
        runtime=None,
    ):
        # Controllers and Pods are allowed to overwrite their results during any
        # status to prevent a new ScriptSet being created every time a controller
        # starts. This also allows us to avoid creating an RPC call for the
        # rack controller to create a new ScriptSet.
        if (not self.script_set.node.is_controller
                and not self.script_set.node.is_pod):
            # Allow PENDING, APPLYING_NETCONF, INSTALLING, and RUNNING scripts
            # in case the node didn't inform MAAS the Script was being run and
            # just uploaded results.
            assert self.status in SCRIPT_STATUS_RUNNING_OR_PENDING

        if timedout:
            self.status = SCRIPT_STATUS.TIMEDOUT
        elif exit_status is not None:
            self.exit_status = exit_status
            if exit_status == 0:
                self.status = SCRIPT_STATUS.PASSED
            elif self.status == SCRIPT_STATUS.INSTALLING:
                self.status = SCRIPT_STATUS.FAILED_INSTALLING
            elif self.status == SCRIPT_STATUS.APPLYING_NETCONF:
                self.status = SCRIPT_STATUS.FAILED_APPLYING_NETCONF
            else:
                self.status = SCRIPT_STATUS.FAILED

        if output is not None:
            self.output = Bin(output)
        if stdout is not None:
            self.stdout = Bin(stdout)
        if stderr is not None:
            self.stderr = Bin(stderr)
        if result is not None:
            self.result = Bin(result)
            try:
                parsed_yaml = self.read_results()
            except ValidationError as err:
                err_msg = (
                    "%s(%s) sent a script result with invalid YAML: %s" % (
                        self.script_set.node.fqdn,
                        self.script_set.node.system_id,
                        err.message,
                    ))
                logger.error(err_msg)
                Event.objects.create_node_event(
                    system_id=self.script_set.node.system_id,
                    event_type=EVENT_TYPES.SCRIPT_RESULT_ERROR,
                    event_description=err_msg,
                )
            else:
                status = parsed_yaml.get("status")
                if status == "passed":
                    self.status = SCRIPT_STATUS.PASSED
                elif status == "failed":
                    self.status = SCRIPT_STATUS.FAILED
                elif status == "degraded":
                    self.status = SCRIPT_STATUS.DEGRADED
                elif status == "timedout":
                    self.status = SCRIPT_STATUS.TIMEDOUT
                elif status == "skipped":
                    self.status = SCRIPT_STATUS.SKIPPED

                link_connected = parsed_yaml.get("link_connected")
                if self.interface and isinstance(link_connected, bool):
                    self.interface.link_connected = link_connected
                    self.interface.save(update_fields=["link_connected"])

        if self.script:
            if script_version_id is not None:
                for script in self.script.script.previous_versions():
                    if script.id == script_version_id:
                        self.script_version = script
                        break
                if self.script_version is None:
                    err_msg = (
                        "%s(%s) sent a script result for %s(%d) with an "
                        "unknown script version(%d)." % (
                            self.script_set.node.fqdn,
                            self.script_set.node.system_id,
                            self.script.name,
                            self.script.id,
                            script_version_id,
                        ))
                    logger.error(err_msg)
                    Event.objects.create_node_event(
                        system_id=self.script_set.node.system_id,
                        event_type=EVENT_TYPES.SCRIPT_RESULT_ERROR,
                        event_description=err_msg,
                    )
            else:
                # If no script version was given assume the latest version
                # was run.
                self.script_version = self.script.script

        # If this is a commissioning result, check whether it came from a builtin
        # script; if so, run its hook before committing to the database.
        if (self.script_set.result_type == RESULT_TYPE.COMMISSIONING
                and self.name in NODE_INFO_SCRIPTS and stdout is not None):
            post_process_hook = NODE_INFO_SCRIPTS[self.name]["hook"]
            err = ("%s(%s): commissioning script '%s' failed during "
                   "post-processing." % (
                       self.script_set.node.fqdn,
                       self.script_set.node.system_id,
                       self.name,
                   ))
            # Circular imports.
            from metadataserver.api import try_or_log_event

            signal_status = try_or_log_event(
                self.script_set.node,
                None,
                err,
                post_process_hook,
                node=self.script_set.node,
                output=self.stdout,
                exit_status=self.exit_status,
            )
            # If the script failed to process mark the script as failed to
            # prevent testing from running and help users identify where
            # the error came from. This can happen when a commissioning
            # script generated invalid output.
            if signal_status is not None:
                self.status = SCRIPT_STATUS.FAILED

        if (self.status == SCRIPT_STATUS.PASSED and self.script
                and self.script.script_type == SCRIPT_TYPE.COMMISSIONING
                and self.script.recommission):
            self.script_set.scriptresult_set.filter(
                script_name__in=NODE_INFO_SCRIPTS).update(
                    status=SCRIPT_STATUS.PENDING,
                    started=None,
                    ended=None,
                    updated=now(),
                )

        self.save(runtime=runtime)

    @property
    def history(self):
        qs = ScriptResult.objects.filter(
            script_set__node_id=self.script_set.node_id)
        if self.script is not None:
            qs = qs.filter(script=self.script)
        else:
            qs = qs.filter(script_name=self.script_name)
        # XXX ltrager 2017-10-05 - Shows script runs from before MAAS supported
        # the hardware type or physical_blockdevice fields in history.
        # Solves LP: #1721524
        qs = qs.filter(
            Q(physical_blockdevice=self.physical_blockdevice)
            | Q(physical_blockdevice__isnull=True))
        qs = qs.order_by("-id")
        return qs

    def save(self, *args, runtime=None, **kwargs):
        if self.started is None and self.status == SCRIPT_STATUS.RUNNING:
            self.started = datetime.now()
            if "update_fields" in kwargs:
                kwargs["update_fields"].append("started")
        elif self.ended is None and self.status not in (
                SCRIPT_STATUS_RUNNING_OR_PENDING):
            self.ended = datetime.now()
            if "update_fields" in kwargs:
                kwargs["update_fields"].append("ended")
            # LP: #1730799 - If a script is run quickly the POST telling MAAS
            # the script has started comes in after the POST telling MAAS the
            # result.
            if self.started is None:
                if runtime:
                    self.started = self.ended - timedelta(seconds=runtime)
                else:
                    self.started = self.ended
                if "update_fields" in kwargs:
                    kwargs["update_fields"].append("started")

        if self.id is None:
            purge_unlinked_blockdevice = False
            purge_unlinked_interface = False
            for param in self.parameters.values():
                if "value" in param and isinstance(param["value"], dict):
                    if "physical_blockdevice" in param["value"]:
                        self.physical_blockdevice = param["value"].pop(
                            "physical_blockdevice")
                        param["value"][
                            "physical_blockdevice_id"] = self.physical_blockdevice.id
                        purge_unlinked_blockdevice = True
                    elif "interface" in param["value"]:
                        self.interface = param["value"].pop("interface")
                        param["value"]["interface_id"] = self.interface.id
                        purge_unlinked_interface = True
            if purge_unlinked_blockdevice or purge_unlinked_interface:
                # Cleanup previous ScriptResults which failed to map to a
                # required device in a previous run. This may happen due to an
                # issue during commissioning such as not finding devices.
                qs = ScriptResult.objects.filter(
                    script=self.script, script_set__node=self.script_set.node)
                # Exclude passed results as they must have been from a previous
                # version of the script which did not require parameters. 2.7
                # adds interface support and the internet-connectivity test
                # has been extended to support interface parameters.
                qs = qs.exclude(status=SCRIPT_STATUS.PASSED)
                if purge_unlinked_blockdevice:
                    qs = qs.filter(physical_blockdevice=None)
                if purge_unlinked_interface:
                    qs = qs.filter(interface=None)
                qs.delete()

        return super().save(*args, **kwargs)
Example #27
class RootKey(TimestampedModel):
    """A root key for signing macaroons."""

    id = BigAutoField(primary_key=True, verbose_name="ID")
    material = BinaryField()
    expiration = DateTimeField()
Example #28
class TransactionNow(Func):
    template = 'CURRENT_TIMESTAMP'
    output_field = DateTimeField()
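
# Hedged usage sketch (hypothetical `Order` model with an `expires_at`
# DateTimeField): CURRENT_TIMESTAMP is evaluated inside the database, so on
# PostgreSQL it yields the transaction's start time from the database clock
# rather than the application server's clock.
expired = Order.objects.filter(expires_at__lt=TransactionNow())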
Example #29
class ExtractSecond(Extract):
    lookup_name = 'second'


DateField.register_lookup(ExtractYear)
DateField.register_lookup(ExtractMonth)
DateField.register_lookup(ExtractDay)
DateField.register_lookup(ExtractWeekDay)
DateField.register_lookup(ExtractWeek)
DateField.register_lookup(ExtractQuarter)

TimeField.register_lookup(ExtractHour)
TimeField.register_lookup(ExtractMinute)
TimeField.register_lookup(ExtractSecond)

DateTimeField.register_lookup(ExtractHour)
DateTimeField.register_lookup(ExtractMinute)
DateTimeField.register_lookup(ExtractSecond)

ExtractYear.register_lookup(YearExact)
ExtractYear.register_lookup(YearGt)
ExtractYear.register_lookup(YearGte)
ExtractYear.register_lookup(YearLt)
ExtractYear.register_lookup(YearLte)


class TruncBase(TimezoneMixin, Transform):
    arity = 1
    kind = None
    tzinfo = None
Example #30
from django.db.models import Transform
from django.db.models import DateTimeField, TimeField
from django.utils.functional import cached_property


class TimeValue(Transform):
    lookup_name = 'time'
    function = 'time'

    def as_sql(self, compiler, connection):
        lhs, params = compiler.compile(self.lhs)
        return 'TIME({})'.format(lhs), params

    @cached_property
    def output_field(self):
        return TimeField()


DateTimeField.register_lookup(TimeValue)
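
# Hedged usage sketch (hypothetical `Shift` model with a `start`
# DateTimeField): the registered transform exposes a `__time` lookup that
# compares only the time-of-day component of the column.
from datetime import time

morning = Shift.objects.filter(start__time__lt=time(9, 0))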
Example #31
    def get(self, request, organization):
        """
        Fetches alert rules and legacy rules for an organization
        """
        project_ids = self.get_requested_project_ids(request) or None
        if project_ids == {-1}:  # All projects for org:
            project_ids = Project.objects.filter(
                organization=organization).values_list("id", flat=True)
        elif project_ids is None:  # All projects for user
            org_team_list = Team.objects.filter(
                organization=organization).values_list("id", flat=True)
            user_team_list = OrganizationMemberTeam.objects.filter(
                organizationmember__user=request.user,
                team__in=org_team_list).values_list("team", flat=True)
            project_ids = Project.objects.filter(
                teams__in=user_team_list).values_list("id", flat=True)

        # Materialize the project ids here. This helps us to not overwhelm the query planner with
        # overcomplicated subqueries. Previously, this was causing Postgres to use a suboptimal
        # index to filter on.
        project_ids = list(project_ids)

        teams = request.GET.getlist("team", [])
        team_filter_query = None
        if len(teams) > 0:
            try:
                teams_query, unassigned = parse_team_params(
                    request, organization, teams)
            except InvalidParams as err:
                return Response(str(err), status=status.HTTP_400_BAD_REQUEST)

            team_filter_query = Q(
                owner_id__in=teams_query.values_list("actor_id", flat=True))
            if unassigned:
                team_filter_query = team_filter_query | Q(owner_id=None)

        alert_rules = AlertRule.objects.fetch_for_organization(
            organization, project_ids)
        if not features.has("organizations:performance-view", organization):
            # Filter to only error alert rules
            alert_rules = alert_rules.filter(
                snuba_query__dataset=Dataset.Events.value)
        issue_rules = Rule.objects.filter(
            status__in=[RuleStatus.ACTIVE, RuleStatus.INACTIVE],
            project__in=project_ids)
        name = request.GET.get("name", None)
        if name:
            alert_rules = alert_rules.filter(Q(name__icontains=name))
            issue_rules = issue_rules.filter(Q(label__icontains=name))

        if team_filter_query:
            alert_rules = alert_rules.filter(team_filter_query)
            issue_rules = issue_rules.filter(team_filter_query)

        expand = request.GET.getlist("expand", [])
        if "latestIncident" in expand:
            alert_rules = alert_rules.annotate(incident_id=Coalesce(
                Subquery(
                    Incident.objects.filter(alert_rule=OuterRef(
                        "pk")).order_by("-date_started").values("id")[:1]),
                Value("-1"),
            ))

        is_asc = request.GET.get("asc", False) == "1"
        sort_key = request.GET.getlist("sort", ["date_added"])
        rule_sort_key = [
            "label" if x == "name" else x for x in sort_key
        ]  # Rules don't share the same field name for their title/label/name, so we account for that here.
        case_insensitive = sort_key == ["name"]

        if "incident_status" in sort_key:
            alert_rules = alert_rules.annotate(incident_status=Coalesce(
                Subquery(
                    Incident.objects.filter(alert_rule=OuterRef(
                        "pk")).order_by("-date_started").values("status")[:1]),
                Value(-1, output_field=IntegerField()),
            ))
            issue_rules = issue_rules.annotate(
                incident_status=Value(-2, output_field=IntegerField()))

        if "date_triggered" in sort_key:
            far_past_date = Value(make_aware(datetime.min),
                                  output_field=DateTimeField())
            alert_rules = alert_rules.annotate(date_triggered=Coalesce(
                Subquery(
                    Incident.objects.filter(alert_rule=OuterRef("pk")).
                    order_by("-date_started").values("date_started")[:1]),
                far_past_date,
            ), )
            issue_rules = issue_rules.annotate(date_triggered=far_past_date)
        alert_rule_intermediary = CombinedQuerysetIntermediary(
            alert_rules, sort_key)
        rule_intermediary = CombinedQuerysetIntermediary(
            issue_rules, rule_sort_key)
        return self.paginate(
            request,
            paginator_cls=CombinedQuerysetPaginator,
            on_results=lambda x: serialize(
                x, request.user, CombinedRuleSerializer(expand=expand)),
            default_per_page=25,
            intermediaries=[alert_rule_intermediary, rule_intermediary],
            desc=not is_asc,
            cursor_cls=StringCursor if case_insensitive else Cursor,
            case_insensitive=case_insensitive,
        )
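The Coalesce(Subquery(...)) annotations above are the load-bearing trick: they pull one column from the newest related Incident into each AlertRule row and substitute a sentinel for rules with no incidents, so NULLs never disturb the sort order. A minimal sketch of the same pattern, using hypothetical Book/Review models in place of AlertRule/Incident:

from django.db.models import IntegerField, OuterRef, Subquery, Value
from django.db.models.functions import Coalesce

# Newest review's rating per book; [:1] keeps the subquery scalar.
latest_rating = Subquery(
    Review.objects.filter(book=OuterRef("pk"))
    .order_by("-created")
    .values("rating")[:1])

# Books without reviews get -1 instead of NULL, so the ordering is total.
books = Book.objects.annotate(
    latest_rating=Coalesce(latest_rating,
                           Value(-1, output_field=IntegerField()))
).order_by("-latest_rating")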
Example #32
0
                # filter by function provided in suffix
                try:
                    intyp = self.model._meta.get_field(flt).get_internal_type()
                except FieldDoesNotExist:
                    intyp = "CharField"

                # for gt/lte date checks we want to force the time to 1
                # millisecond before midnight so the whole day is covered
                if intyp in date_fields:
                    if m.group(2) in ["gt", "lte"]:
                        if len(v) == 10:
                            v = "%s 23:59:59.999" % v

                    # convert to datetime and make tz aware
                    try:
                        v = DateTimeField().to_python(v)
                    except ValidationError as inst:
                        raise RestValidationError({"detail": str(inst.messages[0])})
                    if timezone.is_naive(v):
                        v = timezone.make_aware(v)
                    if "_ctf" in self.request.query_params:
                        self.request._ctf = {
                            "%s__%s" % (m.group(1), m.group(2)): v
                        }

                # contains should become icontains because we always
                # want it to do case-insensitive checks
                if m.group(2) == "contains":
                    filters["%s__icontains" % flt] = v
                elif m.group(2) == "startswith":
                    filters["%s__istartswith" % flt] = v
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('blank', True)
        kwargs.setdefault('default', datetime_now)
        DateTimeField.__init__(self, *args, **kwargs)
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('verbose_name', _('Date of Birth'))
        kwargs.setdefault('editable', True)
        kwargs.setdefault('help_text', _('Format is YYYY-MM-DD'))
        # self.validators.append(datetime_is_not_future)
        DateTimeField.__init__(self, *args, **kwargs)
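Both overrides above follow the same recipe: subclass DateTimeField, pre-seed kwargs with setdefault so callers can still override each option, and delegate to the parent constructor. A hedged sketch of the first one packaged as a reusable field on a model (the model and field names are illustrative):

from django.db.models import DateTimeField, Model
from django.utils import timezone


class CreationDateTimeField(DateTimeField):
    """Set once at insert time; hidden from forms."""

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('blank', True)
        kwargs.setdefault('default', timezone.now)  # stand-in for datetime_now
        super().__init__(*args, **kwargs)


class Article(Model):
    created = CreationDateTimeField()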
class Event(Schedulable):
    '''
    An Event is a schedulable item with a conference model item as its payload.
    '''
    objects = InheritanceManager()
    eventitem = ForeignKey(EventItem,
                           on_delete=CASCADE,
                           related_name="scheduler_events")
    starttime = DateTimeField(blank=True)
    max_volunteer = PositiveIntegerField(default=0)
    approval_needed = BooleanField(default=False)
    max_commitments = PositiveIntegerField(default=0)

    def has_commitment_space(self, commitment_class_name):
        from scheduler.models import Ordering
        return (Ordering.objects.filter(
            allocation__event=self, class_name=commitment_class_name).count() <
                self.max_commitments)

    @property
    def foreign_event_id(self):
        return self.eventitem.eventitem_id

    # New - fits scheduling API refactor
    def set_locations(self, locations):
        '''
        Takes a LIST of locations, removes all existing location settings
        and replaces them with the given list.  Locations are expected to be
        location items
        '''
        from scheduler.models import ResourceAllocation
        for assignment in self.resources_allocated.all():
            if assignment.resource.as_subtype.__class__.__name__ == "Location":
                assignment.delete()
        for location in locations:
            if location is not None:
                try:
                    loc = Location.objects.select_subclasses().get(
                        _item=location)
                except Location.DoesNotExist:
                    loc = Location(_item=location)
                    loc.save()
                ra = ResourceAllocation(resource=loc, event=self)
                ra.save()

    # New - from refactoring
    @property
    def people(self):
        people = []
        for booking in self.resources_allocated.all():
            if booking.resource.as_subtype.__class__.__name__ == "Worker":
                person = Person(booking=booking)
                if hasattr(booking, 'label'):
                    person.label = booking.label.text
                people += [person]
        return people

    # New - from refactoring
    def allocate_person(self, person):
        '''
        allocated worker for the new model - right now, focused on create
        uses the Person from the data_transfer objects.
        '''
        from scheduler.idd import get_schedule
        from scheduler.models import (
            Ordering,
            ResourceAllocation,
        )

        warnings = []
        time_format = GBE_DATETIME_FORMAT

        worker = None
        if person.public_id:
            item = WorkerItem.objects.get(pk=person.public_id)
            worker = Worker(_item=item, role=person.role)
        else:
            worker = Worker(_item=person.user.profile, role=person.role)
            # TODO is there a leak here?  what happens to old workers
            # that aren't linked??
        worker.save()

        if person.users:
            users = person.users
        else:
            users = [worker.workeritem.user_object]

        for user in users:
            for conflict in get_schedule(
                    user=user, start_time=self.start_time,
                    end_time=self.end_time).schedule_items:
                if not person.booking_id or (person.booking_id !=
                                             conflict.booking_id):
                    warnings += [
                        Warning(code="SCHEDULE_CONFLICT",
                                user=user,
                                occurrence=conflict.event)
                    ]
        if person.booking_id:
            allocation = ResourceAllocation.objects.get(id=person.booking_id)
            allocation.resource = worker
            allocation.event = self
        else:
            allocation = ResourceAllocation(event=self, resource=worker)
        allocation.save()
        if person.commitment:
            ordering, created = Ordering.objects.get_or_create(
                allocation=allocation)
            if person.commitment.role is not None:
                ordering.role = person.commitment.role
            if person.commitment.order:
                ordering.order = person.commitment.order
            ordering.class_name = person.commitment.class_name
            ordering.class_id = person.commitment.class_id
            ordering.save()
        if self.extra_volunteers() > 0:
            warnings += [
                Warning(code="OCCURRENCE_OVERBOOKED",
                        details="Over booked by %s volunteers" %
                        (self.extra_volunteers()))
            ]
        if person.label:
            # refactor
            from scheduler.models import Label
            label, created = Label.objects.get_or_create(allocation=allocation)
            label.text = person.label
            label.save()
        return BookingResponse(warnings=warnings,
                               booking_id=allocation.pk,
                               occurrence=self)

    def role_count(self, role="Volunteer"):
        allocations = self.resources_allocated.all()
        participants = allocations.filter(resource__worker__role=role).count()
        return participants

    @property
    def event_type_name(self):
        '''
        Get event type name. Uses a database call
        '''
        return self.event_type.__name__

    @property
    def event_type(self):
        '''
        Get event's underlying type (ie, conference model)
        '''
        return type(self.as_subtype)

    @property
    def as_subtype(self):
        '''
        Get the representation of this Event as its underlying conference type
        '''
        return EventItem.objects.get_subclass(eventitem_id=self.eventitem_id)

    @property
    def duration(self):
        return self.eventitem.child().sched_duration

    def __str__(self):
        return self.eventitem.describe

    @property
    def location(self):
        location = Location.objects.filter(allocations__event=self).first()
        if location:
            return location._item
        return None  # or what??

    def extra_volunteers(self):
        '''
        The difference between the max suggested # of volunteers
        and the actual number
        > 0 if there are too many volunteers for the max. The number
        will be the # of people over booked
        (if there are 3 spaces, and 4 volunteers, the value returned is 1)
        = 0 if it is at capacity
        < 0 if it is fewer than the max; the absolute value is the
        amount of space remaining (if there are 4 spaces and 3 volunteers,
        the value will be -1)
        '''
        count = Worker.objects.filter(allocations__event=self,
                                      role='Volunteer').count()
        return count - self.max_volunteer

    # New with Scheduler API
    @property
    def labels(self):
        return self.eventlabel_set.values_list('text', flat=True)
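objects = InheritanceManager() (from django-model-utils) is what makes as_subtype cheap to express; a short usage sketch of the queries it unlocks (the primary key is illustrative):

from scheduler.models import Event, EventItem

# Each row comes back as its most-derived subclass, not as a bare Event.
for event in Event.objects.select_subclasses():
    print(type(event).__name__, event.starttime)

# Single-row variant, the same call as_subtype makes against EventItem.
item = EventItem.objects.get_subclass(eventitem_id=42)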
    def __init__(self, *args, **kwargs):
        kwargs.setdefault("editable", False)
        kwargs.setdefault("blank", True)
        kwargs.setdefault("default", datetime_now)
        DateTimeField.__init__(self, *args, **kwargs)
Example #37
0
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        tz = timezone.get_current_timezone()

        if 'latest' in self.request.GET:
            clear_cache(self.request.event)

        subevent = None
        if self.request.GET.get("subevent",
                                "") != "" and self.request.event.has_subevents:
            i = self.request.GET.get("subevent", "")
            try:
                subevent = self.request.event.subevents.get(pk=i)
            except SubEvent.DoesNotExist:
                pass

        cache = self.request.event.cache
        ckey = str(subevent.pk) if subevent else 'all'

        p_date = OrderPayment.objects.filter(
            order=OuterRef('pk'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                       OrderPayment.PAYMENT_STATE_REFUNDED),
            payment_date__isnull=False).values('order').annotate(
                m=Max('payment_date')).values('m').order_by()
        op_date = OrderPayment.objects.filter(
            order=OuterRef('order'),
            state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED,
                       OrderPayment.PAYMENT_STATE_REFUNDED),
            payment_date__isnull=False).values('order').annotate(
                m=Max('payment_date')).values('m').order_by()

        # Orders by day
        ctx['obd_data'] = cache.get('statistics_obd_data' + ckey)
        if not ctx['obd_data']:
            oqs = Order.objects.annotate(
                payment_date=Subquery(p_date, output_field=DateTimeField()))
            if subevent:
                oqs = oqs.filter(positions__subevent_id=subevent,
                                 positions__canceled=False).distinct()

            ordered_by_day = {}
            for o in oqs.filter(event=self.request.event).values('datetime'):
                day = o['datetime'].astimezone(tz).date()
                ordered_by_day[day] = ordered_by_day.get(day, 0) + 1
            paid_by_day = {}
            for o in oqs.filter(
                    event=self.request.event,
                    payment_date__isnull=False).values('payment_date'):
                day = o['payment_date'].astimezone(tz).date()
                paid_by_day[day] = paid_by_day.get(day, 0) + 1

            data = []
            for d in dateutil.rrule.rrule(
                    dateutil.rrule.DAILY,
                    dtstart=min(ordered_by_day.keys())
                    if ordered_by_day else datetime.date.today(),
                    until=max(
                        max(ordered_by_day.keys()
                            if ordered_by_day else [datetime.date.today()]),
                        max(paid_by_day.keys()
                            if paid_by_day else [datetime.date(1970, 1, 1)]))):
                d = d.date()
                data.append({
                    'date': d.strftime('%Y-%m-%d'),
                    'ordered': ordered_by_day.get(d, 0),
                    'paid': paid_by_day.get(d, 0)
                })

            ctx['obd_data'] = json.dumps(data)
            cache.set('statistics_obd_data' + ckey, ctx['obd_data'])

        # Orders by product
        ctx['obp_data'] = cache.get('statistics_obp_data' + ckey)
        if not ctx['obp_data']:
            opqs = OrderPosition.objects
            if subevent:
                opqs = opqs.filter(subevent=subevent)
            num_ordered = {
                p['item']: p['cnt']
                for p in (opqs.filter(
                    order__event=self.request.event).values('item').annotate(
                        cnt=Count('id')).order_by())
            }
            num_paid = {
                p['item']: p['cnt']
                for p in (opqs.filter(order__event=self.request.event,
                                      order__status=Order.STATUS_PAID).
                          values('item').annotate(cnt=Count('id')).order_by())
            }
            item_names = {
                i.id: str(i)
                for i in Item.objects.filter(event=self.request.event)
            }
            ctx['obp_data'] = json.dumps([{
                'item': item_names[item],
                'ordered': cnt,
                'paid': num_paid.get(item, 0)
            } for item, cnt in num_ordered.items()])
            cache.set('statistics_obp_data' + ckey, ctx['obp_data'])

        ctx['rev_data'] = cache.get('statistics_rev_data' + ckey)
        if not ctx['rev_data']:
            rev_by_day = {}
            if subevent:
                for o in OrderPosition.objects.annotate(payment_date=Subquery(
                        op_date, output_field=DateTimeField())).filter(
                            order__event=self.request.event,
                            subevent=subevent,
                            order__status=Order.STATUS_PAID,
                            payment_date__isnull=False).values(
                                'payment_date', 'price'):
                    day = o['payment_date'].astimezone(tz).date()
                    rev_by_day[day] = rev_by_day.get(day, 0) + o['price']
            else:
                for o in Order.objects.annotate(payment_date=Subquery(
                        p_date, output_field=DateTimeField())).filter(
                            event=self.request.event,
                            status=Order.STATUS_PAID,
                            payment_date__isnull=False).values(
                                'payment_date', 'total'):
                    day = o['payment_date'].astimezone(tz).date()
                    rev_by_day[day] = rev_by_day.get(day, 0) + o['total']

            data = []
            total = 0
            for d in dateutil.rrule.rrule(
                    dateutil.rrule.DAILY,
                    dtstart=min(rev_by_day.keys()
                                if rev_by_day else [datetime.date.today()]),
                    until=max(rev_by_day.keys()
                              if rev_by_day else [datetime.date.today()])):
                d = d.date()
                total += float(rev_by_day.get(d, 0))
                data.append({
                    'date': d.strftime('%Y-%m-%d'),
                    'revenue': round(total, 2),
                })
            ctx['rev_data'] = json.dumps(data)
            cache.set('statistics_rev_data' + ckey, ctx['rev_data'])

        ctx['has_orders'] = self.request.event.orders.exists()

        return ctx
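The p_date/op_date subqueries are the core of this view: grouping the correlated payments by order, taking Max('payment_date'), and projecting the single column back out yields a scalar subquery that Subquery(...) can attach to every order. Reduced to its skeleton, with step-by-step comments:

from django.db.models import DateTimeField, Max, OuterRef, Subquery

latest_payment = (OrderPayment.objects
                  .filter(order=OuterRef('pk'),       # correlate with the outer Order
                          payment_date__isnull=False)
                  .values('order')                    # GROUP BY order
                  .annotate(m=Max('payment_date'))    # newest payment per order
                  .values('m')                        # project exactly one column
                  .order_by())                        # drop any default ordering

orders = Order.objects.annotate(
    payment_date=Subquery(latest_payment, output_field=DateTimeField()))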
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('auto_now', True)
        DateTimeField.__init__(self, *args, **kwargs)
Example #39
0
class GameCountByNetwork(pg.MaterializedView):
    """
    A materialized view that serves extremely fast access to slightly-outdated counts of rating and games by network
    """

    concurrent_index = "network_id"

    sql = """
    SELECT
      counts.*,
      trainings_network.name as network_name,
      trainings_network.run_id as run_id,
      trainings_network.created_at as network_created_at
    FROM
    (
      SELECT
        COALESCE(trainingcounts.network_id, ratingcounts.network_id) as network_id,
        COALESCE(trainingcounts.total_num_training_games, 0) as total_num_training_games,
        COALESCE(trainingcounts.total_num_training_rows, 0) as total_num_training_rows,
        COALESCE(ratingcounts.total_num_rating_games, 0) as total_num_rating_games,
        COALESCE(ratingcounts.total_rating_score,0.0) as total_rating_score
      FROM (
        SELECT
        black_network_id as network_id,
        count(*) as total_num_training_games,
        sum(num_training_rows) as total_num_training_rows
        FROM games_traininggame
        GROUP BY black_network_id
      ) trainingcounts
      FULL OUTER JOIN
      (
        SELECT network_id, sum(num_games) as total_num_rating_games, sum(score) as total_rating_score
        FROM
        (
          (
            SELECT
            black_network_id as network_id,
            count(*) as num_games,
            sum(case when winner = 'B' then 1 when winner = 'W' then 0 else 0.5 end) as score
            FROM games_ratinggame
            GROUP BY black_network_id
          )
          UNION ALL
          (
            SELECT
            white_network_id as network_id,
            count(*) as num_games,
            sum(case when winner = 'W' then 1 when winner = 'B' then 0 else 0.5 end) as score
            FROM games_ratinggame
            GROUP BY white_network_id
          )
        ) subquery
        GROUP BY network_id
      ) ratingcounts
      ON trainingcounts.network_id = ratingcounts.network_id
    ) counts
    INNER JOIN
    trainings_network
    ON counts.network_id = trainings_network.id
    """

    network = OneToOneField(Network,
                            primary_key=True,
                            db_index=True,
                            db_constraint=False,
                            on_delete=DO_NOTHING)
    run = ForeignKey(Run,
                     db_index=True,
                     db_constraint=False,
                     on_delete=DO_NOTHING)
    network_name = CharField(max_length=128, db_index=True)
    network_created_at = DateTimeField(db_index=True)

    total_num_training_games = IntegerField(null=False, db_index=True)
    total_num_training_rows = IntegerField(null=False)
    total_num_rating_games = IntegerField(null=False, db_index=True)
    total_rating_score = FloatField(null=False)

    class Meta:
        managed = False
        db_table = "games_gamecountbynetwork"
Example #40
0
class ExtractSecond(Extract):
    lookup_name = 'second'


DateField.register_lookup(ExtractYear)
DateField.register_lookup(ExtractMonth)
DateField.register_lookup(ExtractDay)
DateField.register_lookup(ExtractWeekDay)
DateField.register_lookup(ExtractWeek)

TimeField.register_lookup(ExtractHour)
TimeField.register_lookup(ExtractMinute)
TimeField.register_lookup(ExtractSecond)

DateTimeField.register_lookup(ExtractHour)
DateTimeField.register_lookup(ExtractMinute)
DateTimeField.register_lookup(ExtractSecond)

ExtractYear.register_lookup(YearExact)
ExtractYear.register_lookup(YearGt)
ExtractYear.register_lookup(YearGte)
ExtractYear.register_lookup(YearLt)
ExtractYear.register_lookup(YearLte)


class TruncBase(TimezoneMixin, Transform):
    arity = 1
    kind = None
    tzinfo = None
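Once registered, these transforms become ordinary field lookups and annotations; a brief usage sketch against a hypothetical Entry model:

from django.db.models import Count
from django.db.models.functions import ExtractYear

# __year resolves to ExtractYear, and __gte to the YearGte lookup above.
Entry.objects.filter(pub_date__year__gte=2020)

# DateTimeFields accept the time-part transforms registered above.
Entry.objects.filter(timestamp__second=0)

# The same classes work explicitly as annotations, e.g. counts per year.
Entry.objects.annotate(year=ExtractYear('pub_date')) \
             .values('year').annotate(n=Count('id')).order_by('year')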
Example #41
0
def punishment(request, u=None, validated=None, *args, **kwargs):
  validated = validated or {}  # avoid a shared mutable default argument
  try:
    user = User.objects.get(id=u)
  except Exception as e:
    return 'non-existent user queried - {}'.format(e), 403

  Punishment.objects\
            .annotate(completion=ExpressionWrapper(F('created_at') + F('length'),
                                                   output_field=DateTimeField()))\
            .filter(completion__lte=timezone.now(),
                    resolved=False,
                    length__isnull=False).update(resolved=True)

  if request.method == 'GET':
    punishments = Punishment.objects.filter(user=user)
    if validated['server'] is not None:
      server = Server.objects.get(id=validated['server'])
      punishments = punishments.filter(Q(server=server) | Q(server=None))

    if validated['resolved'] is not None:
      punishments = punishments.filter(resolved=validated['resolved'])

    if validated['muted'] is not None:
      punishments = punishments.filter(is_muted=validated['muted'])

    if validated['banned'] is not None:
      punishments = punishments.filter(is_banned=validated['banned'])

    if validated['gagged'] is not None:
      punishments = punishments.filter(is_gagged=validated['gagged'])

    if validated['kicked'] is not None:
      punishments = punishments.filter(is_kicked=validated['kicked'])

    return [p for p in punishments.annotate(admin=F('created_by__namespace'))
                                  .values('id',
                                          'user',
                                          'server',
                                          'created_at',
                                          'reason',
                                          'resolved',
                                          'created_by',
                                          'length',
                                          'is_banned',
                                          'is_kicked',
                                          'is_muted',
                                          'is_gagged',
                                          'admin')], 200

  elif request.method == 'PUT':
    if 'server' in validated:
      server = Server.objects.get(id=validated['server'])
    else:
      server = None

    if validated['length'] > 0:
      length = datetime.timedelta(seconds=validated['length'])
    else:
      length = None

    punishment = Punishment(user=user,
                            server=server,
                            reason=validated['reason'],
                            is_muted=validated['muted'],
                            is_gagged=validated['gagged'],
                            is_kicked=validated['kicked'],
                            is_banned=validated['banned'],
                            length=length,
                            created_by=request.user)
    punishment.save()

    if validated['plugin']:
      server = [server] if server else Server.objects.all()
      for s in server:
        if punishment.is_gagged or punishment.is_muted:
          SourcemodPluginWrapper(s).mutegag(punishment)
        if punishment.is_banned:
          SourcemodPluginWrapper(s).ban(punishment)
        if punishment.is_kicked:
          punishment.resolved = True
          punishment.save()
          SourcemodPluginWrapper(s).kick(punishment)
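The auto-resolution block at the top of the view is worth isolating: ExpressionWrapper adds a DurationField to a DateTimeField inside the database, so expired punishments are found and resolved in a single UPDATE rather than in Python. A minimal restatement of that step:

from django.db.models import DateTimeField, ExpressionWrapper, F
from django.utils import timezone

# completion = created_at + length, computed by the database
expired = Punishment.objects.annotate(
    completion=ExpressionWrapper(F('created_at') + F('length'),
                                 output_field=DateTimeField()))
expired.filter(completion__lte=timezone.now(),
               resolved=False,
               length__isnull=False).update(resolved=True)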