def ready(self):
    """Register PostgreSQL type handlers and text lookups at app startup."""
    # Connections may already exist before we are called.
    for connection in connections.all():
        if connection.connection is not None:
            register_type_handlers(connection)
    connection_created.connect(register_type_handlers)
    # Install the postgres-specific lookups on both text-like field types.
    for field_class in (CharField, TextField):
        field_class.register_lookup(Unaccent)
        field_class.register_lookup(SearchLookup)
        field_class.register_lookup(TrigramSimilar)
def __init__(self, *args, **kwargs):
    """CharField preconfigured for 999/999-style numbers."""
    defaults = (
        ('editable', True),
        ('max_length', 7),
        ('help_text', _('The format is 999/999, but either of the two numbers can be two or three digits')),
    )
    for option, value in defaults:
        kwargs.setdefault(option, value)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """Hidden CharField that records the machine's hostname.

    Defaults: not editable, blank allowed, max_length=50.
    """
    kwargs.setdefault('editable', False)
    kwargs.setdefault('blank', True)
    kwargs.setdefault('max_length', 50)
    kwargs.setdefault('verbose_name', 'Hostname')
    # Pass the callable itself, not its result: the original called
    # socket.gethostname() here, freezing the hostname at field-definition
    # (import) time. Django evaluates a callable default per object instead.
    kwargs.setdefault('default', socket.gethostname)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, based_fields=None, zoom=None, suffix='', max_length=63, *args, **kwargs):
    # Forward the map-widget options to the cooperative base initializer first.
    super(PlainLocationField, self).__init__(based_fields=based_fields, zoom=zoom, suffix=suffix, *args, **kwargs)
    # Then initialize CharField directly with the stored max_length.
    # NOTE(review): *args/**kwargs are passed to BOTH initializers — confirm
    # the double application is intended and harmless.
    CharField.__init__(self, max_length=max_length, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """CharField restricted to the identity-type choices."""
    for option, value in (
        ('verbose_name', _('What type of identity number is this?')),
        ('editable', True),
        ('max_length', 15),
        ('choices', IDENTITY_TYPE),
    ):
        kwargs.setdefault(option, value)
    # kwargs.setdefault('help_text', _('Format is 9999[12]9999'))
    CharField.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """Hidden, optional CharField for recording a revision string."""
    for option, value in (
        ('editable', False),
        ('blank', True),
        ('null', True),
        ('max_length', 150),
        ('verbose_name', 'Revision'),
    ):
        kwargs.setdefault(option, value)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """Editable CharField for 2-3 letter uppercase initials."""
    guidance = _('Type 2-3 letters, all in uppercase and no spaces')
    for option, value in (
        ('editable', True),
        ('verbose_name', _('Initials')),
        ('max_length', 3),
        ('help_text', guidance),
    ):
        kwargs.setdefault(option, value)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """CharField storing an identifier, auto-populated when ``auto`` is true.

    Keyword arguments:
        auto: whether the field auto-generates its value (default True).
              Consumed here and not forwarded to CharField.
        max_length: defaults to 32.
    """
    kwargs.setdefault('max_length', 32)
    # pop() with a default replaces the original try/del/except KeyError
    # dance and keeps the default value in one place.
    self.auto = kwargs.pop('auto', True)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, max_length=63, *args, **kwargs):
    # Let the cooperative base class consume the widget-related options first.
    super(PlainLocationField, self).__init__(*args, **kwargs)
    # Strip options CharField does not understand before re-initializing it.
    kwargs.pop('based_fields', None)
    kwargs.pop('zoom', None)
    kwargs.pop('suffix', None)
    # NOTE(review): super() above still received the unpopped kwargs — confirm
    # the base class tolerates (or expects) them.
    CharField.__init__(self, max_length=max_length, *args, **kwargs)
def contribute_to_class(self, cls, name):
    """Attach this descriptor plus its backing FK and free-text columns."""
    self.name = name
    self.fk_field_name = '%s_fk' % name
    self.ft_field_name = '%s_ft' % name
    setattr(cls, name, self)
    # Hidden foreign-key column backing the descriptor.
    foreign = ForeignKey(self.foreign_model, blank=True, null=True)
    foreign.contribute_to_class(cls, self.fk_field_name)
    # Companion free-text column.
    freetext = CharField(max_length=255, blank=True)
    freetext.contribute_to_class(cls, self.ft_field_name)
def __init__(self, verbose_name=None, name=None, auto=True, version=1, node=None, clock_seq=None, namespace=None, **kwargs):
    # 36 characters fits the canonical hyphenated UUID text form.
    kwargs['max_length'] = 36
    if auto:
        # Auto-filled values should not be entered via forms.
        kwargs['blank'] = True
        kwargs.setdefault('editable', False)
    self.version = version
    # Version-specific inputs — presumably fed to uuid.uuid1 / uuid3 / uuid5
    # elsewhere; confirm against the value-generation code.
    if version == 1:
        self.node, self.clock_seq = node, clock_seq
    elif version == 3 or version == 5:
        self.namespace, self.name = namespace, name
    CharField.__init__(self, verbose_name, name, **kwargs)
def test_transform(self):
    # Create a tag whose name differs only in case from an existing one.
    upper_name = self.t1.name.upper()
    self.assertNotEqual(self.t1.name, upper_name)
    Tag.objects.create(name=upper_name)
    CharField.register_lookup(Lower)
    try:
        # distinct() on the lowered name must collapse the case duplicates.
        self.assertCountEqual(
            Tag.objects.order_by().distinct('name__lower'),
            [self.t1, self.t2, self.t3, self.t4, self.t5],
        )
    finally:
        CharField._unregister_lookup(Lower)
def test_transform(self):
    query = Query(Author)
    CharField.register_lookup(Lower, 'lower')
    try:
        where_node = query.build_where(~Q(name__lower='foo'))
    finally:
        CharField._unregister_lookup(Lower, 'lower')
    # The negated condition should compile to Exact(Lower(col)).
    compiled_lookup = where_node.children[0]
    self.assertIsInstance(compiled_lookup, Exact)
    self.assertIsInstance(compiled_lookup.lhs, Lower)
    self.assertIsInstance(compiled_lookup.lhs.lhs, SimpleCol)
    self.assertEqual(
        compiled_lookup.lhs.lhs.target, Author._meta.get_field('name'))
def contribute_to_class(self, cls, name):
    """Install the descriptor together with its FK and free-text columns."""
    self.name = name
    self.fk_field_name = '%s_fk' % name
    self.ft_field_name = '%s_ft' % name
    setattr(cls, name, self)
    # Optional related_name is only forwarded when configured.
    fk_options = {'blank': True, 'null': True}
    if self.related_name:
        fk_options['related_name'] = self.related_name
    fk_field = ForeignKey(self.foreign_model, **fk_options)
    fk_field.contribute_to_class(cls, self.fk_field_name)
    ft_field = CharField(max_length=255, blank=True, null=True, default='')
    ft_field.contribute_to_class(cls, self.ft_field_name)
def test_upper_transform(self):
    try:
        CharField.register_lookup(Upper, 'upper')
        Author.objects.create(name='John Smith', alias='smithj')
        Author.objects.create(name='Rhonda')
        # Uppercased comparison should match regardless of stored case.
        matched = Author.objects.filter(name__upper__exact='JOHN SMITH')
        self.assertQuerysetEqual(
            matched.order_by('name'),
            ['John Smith'],
            lambda a: a.name,
        )
    finally:
        CharField._unregister_lookup(Upper, 'upper')
def test_length_transform(self):
    try:
        CharField.register_lookup(Length, 'length')
        Author.objects.create(name='John Smith', alias='smithj')
        Author.objects.create(name='Rhonda')
        # Only names longer than 7 characters should match.
        matched = Author.objects.filter(name__length__gt=7)
        self.assertQuerysetEqual(
            matched.order_by('name'),
            ['John Smith'],
            lambda a: a.name,
        )
    finally:
        CharField._unregister_lookup(Length, 'length')
def __init__(self, verbose_name=None, name=None, auto=True, version=4, node=None, clock_seq=None, namespace=None, **kwargs):
    # Fail fast on interpreters without the uuid module.
    if not HAS_UUID:
        raise ImproperlyConfigured("'uuid' module is required for UUIDField. (Do you have Python 2.5 or higher installed ?)")
    # 36 characters: canonical hyphenated UUID representation.
    kwargs.setdefault('max_length', 36)
    if auto:
        # Treat '' as "unset" so auto-generation can kick in.
        self.empty_strings_allowed = False
        kwargs['blank'] = True
        kwargs.setdefault('editable', False)
    self.auto = auto
    self.version = version
    # Version-specific inputs — presumably fed to uuid.uuid1 / uuid3 / uuid5
    # by the value-generation code; confirm there.
    if version == 1:
        self.node, self.clock_seq = node, clock_seq
    elif version == 3 or version == 5:
        self.namespace, self.name = namespace, name
    CharField.__init__(self, verbose_name, name, **kwargs)
def test_trim_transform(self):
    Author.objects.create(name=' John ')
    Author.objects.create(name='Rhonda')
    # (transform, value the trimmed name should equal)
    cases = (
        (LTrim, 'John '),
        (RTrim, ' John'),
        (Trim, 'John'),
    )
    for transform, trimmed_name in cases:
        with self.subTest(transform=transform):
            try:
                CharField.register_lookup(transform)
                lookup = 'name__%s' % transform.lookup_name
                matched = Author.objects.filter(**{lookup: trimmed_name})
                self.assertQuerysetEqual(matched, [' John '], lambda a: a.name)
            finally:
                CharField._unregister_lookup(transform)
def test_func_transform_bilateral(self):
    # Bilateral transform: applied to both sides of the comparison.
    class UpperBilateral(Upper):
        bilateral = True

    try:
        CharField.register_lookup(UpperBilateral, 'upper')
        Author.objects.create(name='John Smith', alias='smithj')
        Author.objects.create(name='Rhonda')
        # Lowercase RHS still matches because both sides get uppercased.
        matched = Author.objects.filter(name__upper__exact='john smith')
        self.assertQuerysetEqual(
            matched.order_by('name'),
            ['John Smith'],
            lambda a: a.name,
        )
    finally:
        CharField._unregister_lookup(UpperBilateral, 'upper')
def contribute_to_class(self, model, name):
    """Mirror the mongo user document's auth-related fields onto the Django
    user model so Django's auth machinery can introspect them."""
    super(MongoUserManager, self).contribute_to_class(model, name)
    self.dj_model = self.model
    self.model = get_user_document()

    self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
    username = CharField(_('username'), max_length=30, unique=True)
    username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

    self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
    # Use a dedicated loop variable; the original shadowed the `name`
    # parameter here.
    for field_name in self.dj_model.REQUIRED_FIELDS:
        field = CharField(_(field_name), max_length=30)
        field.contribute_to_class(self.dj_model, field_name)

    # Standard Django auth flags and timestamps.
    is_staff = BooleanField(_('is_staff'), default=False)
    is_staff.contribute_to_class(self.dj_model, 'is_staff')
    is_active = BooleanField(_('is_active'), default=False)
    is_active.contribute_to_class(self.dj_model, 'is_active')
    is_superuser = BooleanField(_('is_superuser'), default=False)
    is_superuser.contribute_to_class(self.dj_model, 'is_superuser')
    last_login = DateTimeField(_('last_login'), auto_now_add=True)
    last_login.contribute_to_class(self.dj_model, 'last_login')
    date_joined = DateTimeField(_('date_joined'), auto_now_add=True)
    date_joined.contribute_to_class(self.dj_model, 'date_joined')
def __init__(
    self, verbose_name=None, name=None, auto_if_unset=True, version=4,
    node=None, clock_seq=None, namespace=None, **kwargs
):
    # Length is fixed; callers cannot override it.
    kwargs["max_length"] = 512
    if auto_if_unset:
        # Auto-filled values should not be required in forms.
        kwargs["blank"] = True
        kwargs.setdefault("editable", False)
    self.auto_if_unset = auto_if_unset
    self.version = version
    # Version-specific inputs — presumably consumed by uuid.uuid1 / uuid3 /
    # uuid5 in the value-generation code; confirm there.
    if version == 1:
        self.node, self.clock_seq = node, clock_seq
    elif version == 3 or version == 5:
        self.namespace, self.name = namespace, name
    CharField.__init__(self, verbose_name, name, **kwargs)
def ready(self):
    """Wire up hstore handling and register the postgres text lookups."""
    connection_created.connect(register_hstore_handler)
    for field_class in (CharField, TextField):
        field_class.register_lookup(Unaccent)
        field_class.register_lookup(SearchLookup)
        field_class.register_lookup(TrigramSimilar)
def add_lookups(self):
    """Register the MySQL-specific lookups on CharField and TextField."""
    from django.db.models import CharField, TextField
    from django_mysql.models.lookups import (
        CaseSensitiveExact, Soundex, SoundsLike
    )
    for field_class in (CharField, TextField):
        field_class.register_lookup(CaseSensitiveExact)
        field_class.register_lookup(SoundsLike)
        field_class.register_lookup(Soundex)
def contribute_to_class(self, model, name):
    """Expose the mongo user document's identifying fields on the Django
    user model so auth machinery can introspect them."""
    super(MongoUserManager, self).contribute_to_class(model, name)
    self.dj_model = self.model
    self.model = get_user_document()

    self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
    username = CharField(_('username'), max_length=30, unique=True)
    username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

    self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
    # Use a dedicated loop variable; the original shadowed the `name`
    # parameter here.
    for field_name in self.dj_model.REQUIRED_FIELDS:
        field = CharField(_(field_name), max_length=30)
        field.contribute_to_class(self.dj_model, field_name)
def uninstall_if_needed(setting, value, enter, **kwargs):
    """
    Undo the effects of PostgresConfig.ready() when django.contrib.postgres
    is "uninstalled" by override_settings().
    """
    uninstalled = (
        not enter
        and setting == 'INSTALLED_APPS'
        and 'django.contrib.postgres' not in set(value)
    )
    if uninstalled:
        connection_created.disconnect(register_type_handlers)
        for field_class in (CharField, TextField):
            field_class._unregister_lookup(Unaccent)
            field_class._unregister_lookup(SearchLookup)
            field_class._unregister_lookup(TrigramSimilar)
        # Disconnect this receiver until the next time this app is installed
        # and ready() connects it again to prevent unnecessary processing on
        # each setting change.
        setting_changed.disconnect(uninstall_if_needed)
def ready(self):
    # Connections may already exist before we are called.
    for conn in connections.all():
        if conn.vendor == 'postgresql':
            # Map PostgreSQL type OIDs to contrib.postgres field paths so
            # introspection (inspectdb) recognizes these column types.
            conn.introspection.data_types_reverse.update({
                3802: 'django.contrib.postgres.fields.JSONField',
                3904: 'django.contrib.postgres.fields.IntegerRangeField',
                3906: 'django.contrib.postgres.fields.DecimalRangeField',
                3910: 'django.contrib.postgres.fields.DateTimeRangeField',
                3912: 'django.contrib.postgres.fields.DateRangeField',
                3926: 'django.contrib.postgres.fields.BigIntegerRangeField',
            })
            if conn.connection is not None:
                register_type_handlers(conn)
    connection_created.connect(register_type_handlers)
    # Make the postgres-specific lookups available on text-like fields.
    CharField.register_lookup(Unaccent)
    TextField.register_lookup(Unaccent)
    CharField.register_lookup(SearchLookup)
    TextField.register_lookup(SearchLookup)
    CharField.register_lookup(TrigramSimilar)
    TextField.register_lookup(TrigramSimilar)
class SessionFormSubmission(AbstractFormSubmission):
    # Either session_key or user identifies the submitter (enforced per page
    # by Meta.unique_together below).
    session_key = CharField(max_length=40, null=True, default=None)
    user = ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True,
                      related_name='+', on_delete=PROTECT)
    # JSON mapping: original file path -> {'WxH' variant: thumbnail path}.
    thumbnails_by_path = TextField(default=json.dumps({}))
    last_modification = DateTimeField(_('last modification'), auto_now=True)

    # Workflow states for a submission.
    INCOMPLETE = 'incomplete'
    COMPLETE = 'complete'
    REVIEWED = 'reviewed'
    APPROVED = 'approved'
    REJECTED = 'rejected'
    STATUSES = (
        (INCOMPLETE, _('Not submitted')),
        (COMPLETE, _('In progress')),
        (REVIEWED, _('Under consideration')),
        (APPROVED, _('Approved')),
        (REJECTED, _('Rejected')),
    )
    status = CharField(max_length=10, choices=STATUSES, default=INCOMPLETE)

    class Meta:
        verbose_name = _('form submission')
        verbose_name_plural = _('form submissions')
        unique_together = (('page', 'session_key'), ('page', 'user'))

    @property
    def is_complete(self):
        return self.status != self.INCOMPLETE

    @property
    def form_page(self):
        return self.page.specific

    def get_session(self):
        return import_module(
            settings.SESSION_ENGINE).SessionStore(session_key=self.session_key)

    def reset_step(self):
        # Drop the stored current step; only save when the key was present.
        session = self.get_session()
        try:
            del session[self.form_page.current_step_session_key]
        except KeyError:
            pass
        else:
            session.save()

    def get_storage(self):
        return self.form_page.get_storage()

    def get_thumbnail_path(self, path, width=64, height=64):
        # Return a cached thumbnail path for `path`, generating and recording
        # the thumbnail on first request.
        if not path:
            return ''
        variant = '%s×%s' % (width, height)
        thumbnails_by_path = json.loads(self.thumbnails_by_path)
        thumbnails_paths = thumbnails_by_path.get(path)
        if thumbnails_paths is None:
            thumbnails_by_path[path] = {}
        else:
            thumbnail_path = thumbnails_paths.get(variant)
            if thumbnail_path is not None:
                return thumbnail_path
        path = Path(path)
        thumbnail_path = str(path.with_suffix('.%s%s' % (variant, path.suffix)))
        storage = self.get_storage()
        thumbnail_path = storage.get_available_name(thumbnail_path)
        thumbnail = Image.open(storage.path(path))
        thumbnail.thumbnail((width, height))
        thumbnail.save(storage.path(thumbnail_path))
        thumbnails_by_path[str(path)][variant] = thumbnail_path
        self.thumbnails_by_path = json.dumps(thumbnails_by_path,
                                             cls=StreamFormJSONEncoder)
        self.save()
        return thumbnail_path

    def get_fields(self, by_step=False):
        return self.form_page.get_form_fields(by_step=by_step)

    def get_existing_thumbnails(self, path):
        thumbnails_paths = json.loads(self.thumbnails_by_path).get(path, {})
        for thumbnail_path in thumbnails_paths.values():
            yield thumbnail_path

    def get_files_by_field(self):
        # Map field name -> [original path, thumbnail paths...] for file
        # fields that have an uploaded value.
        data = self.get_data(raw=True)
        files = {}
        for name, field in self.get_fields().items():
            if isinstance(field, FileField):
                path = data.get(name)
                if path:
                    files[name] = [path] + list(
                        self.get_existing_thumbnails(path))
        return files

    def get_all_files(self):
        for paths in self.get_files_by_field().values():
            for path in paths:
                yield path

    def delete_file(self, field_name):
        # Delete stored files for a field and prune their thumbnail records.
        thumbnails_by_path = json.loads(self.thumbnails_by_path)
        for path in self.get_files_by_field().get(field_name, ()):
            self.get_storage().delete(path)
            if path in thumbnails_by_path:
                del thumbnails_by_path[path]
        self.thumbnails_by_path = json.dumps(thumbnails_by_path,
                                             cls=StreamFormJSONEncoder)
        self.save()

    def render_email(self, value):
        return (mark_safe('<a href="mailto:%s" target="_blank">%s</a>')
                % (value, value))

    def render_link(self, value):
        return (mark_safe('<a href="%s" target="_blank">%s</a>')
                % (value, value))

    def render_image(self, value):
        storage = self.get_storage()
        return (
            mark_safe('<a href="%s" target="_blank"><img src="%s" /></a>')
            % (storage.url(value), storage.url(self.get_thumbnail_path(value))))

    def render_file(self, value):
        return mark_safe('<a href="%s" target="_blank">%s</a>') % (
            self.get_storage().url(value), Path(value).name)

    def format_value(self, field, value):
        # Convert a stored value into its human-readable / HTML display form.
        if value is None or value == '':
            return '-'
        # Give the page a chance to format the value first.
        new_value = self.form_page.format_value(field, value)
        if new_value != value:
            return new_value
        if value is True:
            return 'Yes'
        if value is False:
            return 'No'
        if isinstance(value, (list, tuple)):
            return ', '.join(
                [self.format_value(field, item) for item in value])
        if isinstance(value, datetime.date):
            return naturaltime(value)
        if isinstance(field, EmailField):
            return self.render_email(value)
        if isinstance(field, URLField):
            return self.render_link(value)
        if isinstance(field, ImageField):
            return self.render_image(value)
        if isinstance(field, FileField):
            return self.render_file(value)
        if isinstance(value, SafeData) or hasattr(value, '__html__'):
            return value
        return str(value)

    def format_db_field(self, field_name, raw=False):
        # Prefer the model's get_FOO_display() when one exists.
        method = getattr(self, 'get_%s_display' % field_name, None)
        if method is not None:
            return method()
        value = getattr(self, field_name)
        if raw:
            return value
        return self.format_value(
            self._meta.get_field(field_name).formfield(), value)

    def get_steps_data(self, raw=False):
        steps_data = json.loads(self.form_data)
        if raw:
            return steps_data
        # zip_longest so extra steps without stored data still appear.
        fields_and_data_iterator = zip_longest(self.get_fields(by_step=True),
                                               steps_data, fillvalue={})
        return [
            OrderedDict([(name, self.format_value(field, step_data.get(name)))
                         for name, field in step_fields.items()])
            for step_fields, step_data in fields_and_data_iterator
        ]

    def get_extra_data(self, raw=False):
        return self.form_page.get_extra_data(self, raw=raw)

    def get_data(self, raw=False, add_metadata=True):
        steps_data = self.get_steps_data(raw=raw)
        form_data = {}
        form_data.update(self.get_extra_data(raw=raw))
        for step_data in steps_data:
            form_data.update(step_data)
        if add_metadata:
            form_data.update(
                status=self.format_db_field('status', raw=raw),
                user=self.format_db_field('user', raw=raw),
                submit_time=self.format_db_field('submit_time', raw=raw),
                last_modification=self.format_db_field('last_modification',
                                                       raw=raw),
            )
        return form_data

    def steps_with_data_iterator(self, raw=False):
        # Yield (step, [(field_name, field_label, value), ...]) per step.
        for step, step_data_fields, step_data in zip(
                self.form_page.get_steps(),
                self.form_page.get_data_fields(by_step=True),
                self.get_steps_data(raw=raw)):
            yield step, [(field_name, field_label, step_data[field_name])
                         for field_name, field_label in step_data_fields]
class PhysicalBlockDevice(BlockDevice):
    """A physical block device attached to a node."""

    class Meta(DefaultMeta):
        """Needed for South to recognize this model."""

    objects = PhysicalBlockDeviceManager()

    model = CharField(
        max_length=255,
        blank=True,
        null=False,
        help_text="Model name of block device.",
    )

    serial = CharField(
        max_length=255,
        blank=True,
        null=False,
        help_text="Serial number of block device.",
    )

    firmware_version = CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Firmware version of block device.",
    )

    # Only used when the machine is composed in a Pod that supports
    # storage pool.
    storage_pool = ForeignKey(
        PodStoragePool,
        blank=True,
        null=True,
        on_delete=SET_NULL,
        related_name="block_devices",
        help_text="Storage pool that this block device belongs to",
    )

    numa_node = ForeignKey(
        "NUMANode", related_name="blockdevices", on_delete=CASCADE
    )

    def __init__(self, *args, **kwargs):
        # Derive node from numa_node (or vice versa) so callers may supply
        # either, but never both.
        if kwargs:
            # only check when kwargs are passed, which is the normal case when
            # objects are created. If they're loaded from the DB, args get
            # passed instead.
            node = kwargs.get("node")
            numa_node = kwargs.get("numa_node")
            if node and numa_node:
                raise ValidationError("Can't set both node and numa_node")
            if not numa_node:
                # NOTE(review): if neither node nor numa_node is supplied this
                # raises AttributeError on None.default_numanode — confirm
                # callers always pass one of them.
                kwargs["numa_node"] = node.default_numanode
            elif not node:
                kwargs["node"] = numa_node.node
        super().__init__(*args, **kwargs)

    def clean(self):
        # id_path alone identifies the device; otherwise both model and
        # serial are required.
        if not self.id_path and not (self.model and self.serial):
            raise ValidationError(
                "serial/model are required if id_path is not provided."
            )
        super(PhysicalBlockDevice, self).clean()

    def __str__(self):
        return "{model} S/N {serial} {size} attached to {node}".format(
            model=self.model,
            serial=self.serial,
            size=human_readable_bytes(self.size),
            node=self.node,
        )
def ready(self):
    """Register the Unaccent lookup for text-like fields."""
    for field_class in (CharField, TextField):
        field_class.register_lookup(Unaccent)
class PurchaseBankID(Model):
    # Records that a client purchased an article; both sides are indexed
    # for lookup and cascade-deleted with their referents.
    client = ForeignKey(to="Client", db_index=True, on_delete=CASCADE)
    article = ForeignKey(to="Article", db_index=True, on_delete=CASCADE)
    # Identifier from the authorizing party — presumably the BankID
    # transaction/authority reference; confirm against the creating code.
    authorityID = CharField(max_length=200, null=False, blank=False)
    # Platform the purchase was made from — TODO confirm expected values.
    platform = CharField(max_length=100, null=False, blank=False)
def time_off_reports(request, notification_id=None):
    """Render the time-off request report with filtering, sorting and paging.

    Query parameters: sort_by, status, time_off_type, company, date_range
    ('MM/DD/YYYY - MM/DD/YYYY'), search (employee id or name), pk, page.
    """
    sort_by = request.GET.get('sort_by')
    status = request.GET.get('status')
    time_off_type = request.GET.get('time_off_type')
    company_name = request.GET.get('company')
    date_range = request.GET.get('date_range')
    search = request.GET.get('search')
    pk = request.GET.get('pk')
    sort_choices = [
        ('', 'Sort By'),
        ('employee__last_name', 'Last Name'),
        ('-request_date', 'Request Date'),
        ('employee__first_name', 'First Name'),
        ('employee_id', 'Employee ID'),
    ]
    # date_range packs both dates into one string; default to a +/-30-day
    # window around today when absent.
    start_date = datetime.datetime.strptime(date_range[:10], '%m/%d/%Y') if date_range else \
        (datetime.datetime.today() - datetime.timedelta(days=30))
    end_date = datetime.datetime.strptime(date_range[13:], '%m/%d/%Y') if date_range else \
        (datetime.datetime.today() + datetime.timedelta(days=30))
    if notification_id:
        notification = Notification.objects.get(id=notification_id)
        notification.mark_as_read()
    # Direct pk lookup bypasses the active-record base filter.
    if pk:
        time_off_records = TimeOffRequest.objects.filter(pk=pk)
    else:
        time_off_records = TimeOffRequest.objects.filter(
            is_active=True, employee__is_active=True)
    if search:
        # Numeric search matches the employee ID; otherwise match the
        # concatenated full name (case-insensitive).
        try:
            search = int(search)
            time_off_records = time_off_records.filter(
                employee__employee_id=search)
        except ValueError:
            time_off_records = TimeOffRequest.objects.annotate(
                full_name=Concat('employee__first_name', V(' '),
                                 'employee__last_name',
                                 output_field=CharField())).filter(
                full_name__icontains=search, is_active=True,
                employee__is_active=True)
    if time_off_type:
        time_off_records = time_off_records.filter(
            time_off_type__exact=time_off_type)
    if status:
        time_off_records = time_off_records.filter(status__exact=status)
    if company_name:
        time_off_records = time_off_records.filter(
            employee__company__display_name=company_name)
    if start_date and end_date:
        time_off_records = time_off_records.filter(
            dayoff__requested_date__gte=start_date,
            dayoff__requested_date__lte=end_date)
    if sort_by:
        time_off_records = time_off_records.order_by(sort_by)
    # Re-populate the filter form with the active selections.
    f_form = TimeOffFilterForm(sort_choices=sort_choices, data={
        'sort_by': sort_by,
        'status': status,
        'time_off_type': time_off_type,
        'company': company_name,
        'date_range': date_range,
        'search': search
    })
    page = request.GET.get('page')
    paginator = Paginator(time_off_records, 25)
    page_obj = paginator.get_page(page)
    data = {
        'page_obj': page_obj,
        'f_form': f_form,
        'start_date': start_date.strftime('%m/%d/%Y'),
        'end_date': end_date.strftime('%m/%d/%Y'),
    }
    return render(request, 'operations/time_off_reports.html', data)
def get_queryset(self):
    """Build the obligation-by-recipient queryset for one agency and year.

    Requires fiscal_year and awarding_agency_id query parameters; the
    award_category parameter is optional.
    """
    # retrieve post request payload
    json_request = self.request.query_params

    # Retrieve fiscal_year & awarding_agency_id from Request
    fiscal_year = json_request.get("fiscal_year")
    awarding_agency_id = json_request.get("awarding_agency_id")

    # Optional Award Category
    award_category = json_request.get("award_category")

    # Required Query Parameters were not Provided
    if not (fiscal_year and awarding_agency_id):
        raise InvalidParameterException(
            "Missing one or more required query parameters: fiscal_year, awarding_agency_id"
        )
    if not check_valid_toptier_agency(awarding_agency_id):
        raise InvalidParameterException(
            "Awarding Agency ID provided must correspond to a toptier agency"
        )

    toptier_agency = Agency.objects.filter(
        id=awarding_agency_id).first().toptier_agency
    queryset = TransactionNormalized.objects.filter(
        federal_action_obligation__isnull=False)

    # DS-1655: if the AID is "097" (DOD), Include the branches of the
    # military in the queryset
    if toptier_agency.toptier_code == DOD_CGAC:
        tta_list = DOD_ARMED_FORCES_CGAC
        queryset = queryset.filter(
            # Filter based on fiscal_year and awarding_category_id
            fiscal_year=fiscal_year,
            awarding_agency__toptier_agency__toptier_code__in=tta_list,
        )
    else:
        queryset = queryset.filter(
            # Filter based on fiscal_year and awarding_category_id
            fiscal_year=fiscal_year,
            awarding_agency__toptier_agency__toptier_code=toptier_agency.toptier_code,
        )

    queryset = queryset.annotate(
        award_category=F("award__category"),
        # Prefer the assistance recipient name; fall back to contract data.
        recipient_name=Coalesce(
            F("award__latest_transaction__assistance_data__awardee_or_recipient_legal"),
            F("award__latest_transaction__contract_data__awardee_or_recipient_legal"),
        ),
    )

    if award_category is not None:
        # Filter based on award_category
        if award_category != "other":
            queryset = queryset.filter(award_category=award_category)
        else:
            # "other" also covers "insurance", relabeled to "other".
            # NOTE(review): rows already categorized "other" fall through the
            # Case with no default (annotated None) — confirm intended.
            queryset = queryset.filter(
                Q(award_category="insurance")
                | Q(award_category="other")).annotate(award_category=Case(
                    When(award_category="insurance", then=Value("other")),
                    output_field=CharField()))

    # Sum Obligations for each Recipient
    queryset = (queryset.values(
        "award_category", "recipient_name").annotate(obligated_amount=Sum(
            "federal_action_obligation")).order_by("-obligated_amount"))

    return queryset
class SubmissionRevision(Model):
    # Revision types.
    CREATED = 'created'
    CHANGED = 'changed'
    DELETED = 'deleted'
    TYPES = (
        (CREATED, _('Created')),
        (CHANGED, _('Changed')),
        (DELETED, _('Deleted')),
    )
    type = CharField(max_length=7, choices=TYPES)
    created_at = DateTimeField(auto_now_add=True)
    # Generic relation to the submission object this revision snapshots.
    submission_ct = ForeignKey('contenttypes.ContentType', on_delete=CASCADE)
    submission_id = TextField()
    submission = GenericForeignKey('submission_ct', 'submission_id')
    # JSON-serialized snapshot of the submission data.
    data = TextField()
    # Human-readable, newline-separated change summary.
    summary = TextField()

    objects = SubmissionRevisionQuerySet.as_manager()

    class Meta:
        ordering = ('-created_at', )

    @staticmethod
    def get_filters_for(submission):
        # Filter kwargs that locate all revisions of a given submission.
        return {
            'submission_ct':
                ContentType.objects.get_for_model(submission._meta.model),
            'submission_id': str(submission.pk),
        }

    @classmethod
    def diff_summary(cls, page, data1, data2):
        # Build a line-per-field summary of differences between snapshots.
        diff = []
        data_fields = page.get_data_fields()
        # Container values are not shown verbatim in summaries.
        hidden_types = (tuple, list, dict)
        for k, label in data_fields:
            value1 = data1.get(k)
            value2 = data2.get(k)
            if value2 == value1 or not value1 and not value2:
                continue
            is_hidden = (isinstance(value1, hidden_types)
                         or isinstance(value2, hidden_types))
            # Escapes newlines as they are used as separator inside summaries.
            if isinstance(value1, str):
                value1 = value1.replace('\n', r'\n')
            if isinstance(value2, str):
                value2 = value2.replace('\n', r'\n')
            if value2 and not value1:
                diff.append(((_('“%s” set.') % label) if is_hidden
                             else (_('“%s” set to “%s”.'))
                             % (label, value2)))
            elif value1 and not value2:
                diff.append(_('“%s” unset.') % label)
            else:
                diff.append(((_('“%s” changed.') % label) if is_hidden
                             else (_('“%s” changed from “%s” to “%s”.')
                                   % (label, value1, value2))))
        return '\n'.join(diff)

    @classmethod
    def create_from_submission(cls, submission, revision_type):
        # Record a revision of `submission`; returns None when nothing
        # changed since the previous revision.
        page = submission.form_page
        try:
            previous = cls.objects.for_submission(submission).latest(
                'created_at')
        except cls.DoesNotExist:
            previous_data = {}
        else:
            previous_data = previous.get_data()
        filters = cls.get_filters_for(submission)
        data = submission.get_data(raw=True, add_metadata=False)
        data['status'] = submission.status
        if revision_type == cls.CREATED:
            summary = _('Submission created.')
        elif revision_type == cls.DELETED:
            summary = _('Submission deleted.')
        else:
            summary = cls.diff_summary(page, previous_data, data)
        if not summary:
            # Nothing changed.
            return
        filters.update(
            type=revision_type,
            data=json.dumps(data, cls=StreamFormJSONEncoder),
            summary=summary,
        )
        return cls.objects.create(**filters)

    def get_data(self):
        return json.loads(self.data)
class Request(models.Model):
    # Primary key is a UUID string rather than an auto-increment int.
    id = CharField(max_length=36, default=uuid4, primary_key=True)
    path = CharField(max_length=190, db_index=True)
    query_params = TextField(blank=True, default='')
    raw_body = TextField(blank=True, default='')
    body = TextField(blank=True, default='')
    method = CharField(max_length=10)
    start_time = DateTimeField(default=timezone.now, db_index=True)
    view_name = CharField(max_length=190, db_index=True, blank=True,
                          default='', null=True)
    end_time = DateTimeField(null=True, blank=True)
    time_taken = FloatField(blank=True, null=True)
    encoded_headers = TextField(blank=True, default='')  # stores json
    meta_time = FloatField(null=True, blank=True)
    meta_num_queries = IntegerField(null=True, blank=True)
    meta_time_spent_queries = FloatField(null=True, blank=True)
    pyprofile = TextField(blank=True, default='')
    prof_file = FileField(max_length=300, blank=True, storage=silk_storage)

    @property
    def total_meta_time(self):
        # Treat missing components as zero.
        return (self.meta_time or 0) + (self.meta_time_spent_queries or 0)

    @property
    def profile_table(self):
        # Rewrite .py source locations in the parsed profile as viewer
        # links; leave builtin/anonymous frames (containing '{' or '<')
        # and the header row untouched.
        for n, columns in enumerate(parse_profile(self.pyprofile)):
            location = columns[-1]
            if n and '{' not in location and '<' not in location:
                r = re.compile('(?P<src>.*\.py)\:(?P<num>[0-9]+).*')
                m = r.search(location)
                group = m.groupdict()
                src = group['src']
                num = group['num']
                name = 'c%d' % n
                fmt = '<a name={name} href="?pos={n}&file_path={src}&line_num={num}#{name}">{location}</a>'
                rep = fmt.format(**dict(group, **locals()))
                yield columns[:-1] + [mark_safe(rep)]
            else:
                yield columns

    # defined in atomic transaction within SQLQuery save()/delete() as well
    # as in bulk_create of SQLQueryManager
    # TODO: This is probably a bad way to do this, .count() will prob do?
    num_sql_queries = IntegerField(default=0)  # TODO replace with count()

    @property
    def time_spent_on_sql_queries(self):
        """
        TODO: Perhaps there is a nicer way to do this with Django aggregates?
        My initial thought was to perform:
        SQLQuery.objects.filter.aggregate(Sum(F('end_time')) - Sum(F('start_time')))
        However this feature isnt available yet, however there has been talk
        for use of F objects within aggregates for four years here:
        https://code.djangoproject.com/ticket/14030. It looks like this will
        go in soon at which point this should be changed.
        """
        return sum(x.time_taken for x in SQLQuery.objects.filter(request=self))

    @property
    def headers(self):
        if self.encoded_headers:
            raw = json.loads(self.encoded_headers)
        else:
            raw = {}
        return CaseInsensitiveDictionary(raw)

    @property
    def content_type(self):
        return self.headers.get('content-type', None)

    @classmethod
    def garbage_collect(cls, force=False):
        """
        Remove Request/Responses when we are at the SILKY_MAX_RECORDED_REQUESTS
        limit. Note that multiple in-flight requests may call this at once
        causing a double collection.
        """
        check_percent = SilkyConfig().SILKY_MAX_RECORDED_REQUESTS_CHECK_PERCENT
        check_percent /= 100.0
        # Probabilistic collection: usually skip unless forced.
        if check_percent < random.random() and not force:
            return
        target_count = SilkyConfig().SILKY_MAX_RECORDED_REQUESTS
        # Since garbage collection is probabilistic, the target count should
        # be lowered to account for requests before the next garbage
        # collection
        if check_percent != 0:
            target_count -= int(1 / check_percent)
        prune_count = max(cls.objects.count() - target_count, 0)
        # Oldest requests are pruned first.
        prune_rows = cls.objects.order_by('start_time') \
            .values_list('id', flat=True)[:prune_count]
        cls.objects.filter(id__in=list(prune_rows)).delete()

    def save(self, *args, **kwargs):
        # sometimes django requests return the body as 'None'
        if self.raw_body is None:
            self.raw_body = ''
        if self.body is None:
            self.body = ''
        if self.end_time and self.start_time:
            interval = self.end_time - self.start_time
            self.time_taken = interval.total_seconds() * 1000
        super(Request, self).save(*args, **kwargs)
        Request.garbage_collect(force=False)
def detail_asset_view(request, asset_id):
    """Render the detail page for one asset.

    Aggregates finding statistics, per-engine-scope counters, external
    references, related scans/scan definitions, investigation links and the
    asset's risk grade history, then renders ``details-asset.html``.
    """
    asset = get_object_or_404(Asset.objects.for_user(request.user), id=asset_id)
    # 'severity_numm' (typo kept: the name may be referenced by templates)
    # maps severities to sortable string ranks; unknown severities rank as
    # 'high' ("1").
    findings = Finding.objects.filter(asset=asset).annotate(
        severity_numm=Case(
            When(severity="critical", then=Value("0")),
            When(severity="high", then=Value("1")),
            When(severity="medium", then=Value("2")),
            When(severity="low", then=Value("3")),
            When(severity="info", then=Value("4")),
            default=Value("1"),
            output_field=CharField())).annotate(
        scope_list=ArrayAgg('scopes__name')).order_by(
        'severity_numm', 'type', 'updated_at').only(
        "severity", "status", "engine_type", "risk_info", "vuln_refs",
        "title", "id", "solution", "updated_at", "type")

    findings_stats = {
        'total': 0, 'critical': 0, 'high': 0, 'medium': 0,
        'low': 0, 'info': 0, 'new': 0, 'ack': 0, 'cvss_gte_7': 0
    }
    engines_stats = {}
    references = {}

    # Per engine-policy-scope severity counters.
    engine_scopes = {}
    for engine_scope in EnginePolicyScope.objects.all():
        engine_scopes[engine_scope.name] = {
            'priority': engine_scope.priority,
            'id': engine_scope.id,
            'total': 0, 'critical': 0, 'high': 0,
            'medium': 0, 'low': 0, 'info': 0
        }

    for finding in findings:
        findings_stats['total'] += 1
        # Severity is only counted for findings that are not dismissed.
        if finding.status not in ["false-positive", "duplicate"]:
            findings_stats[finding.severity] = findings_stats.get(
                finding.severity, 0) + 1
        if finding.status == 'new':
            findings_stats['new'] += 1
        if finding.status == 'ack':
            findings_stats['ack'] += 1
        for fs in finding.scope_list:
            if fs is not None:
                c = engine_scopes[fs]
                c['total'] += 1
                c[finding.severity] += 1
        engines_stats[finding.engine_type] = engines_stats.get(
            finding.engine_type, 0) + 1
        # Fix: the stat key is 'cvss_gte_7' ("greater than or equal"), so a
        # score of exactly 7.0 must be included (was '> 7.0').
        if finding.risk_info["cvss_base_score"] >= 7.0:
            findings_stats['cvss_gte_7'] += 1
        # Collect external references; values may be scalars or lists.
        if finding.vuln_refs:
            for ref, value in finding.vuln_refs.items():
                tref = references.setdefault(ref, [])
                if isinstance(value, list):
                    tref.extend(value)
                else:
                    tref.append(value)

    # Show only unique references, sorted.
    references_cleaned = {
        ref: sorted(set(values)) for ref, values in references.items()
    }

    # Related scans
    scans_stats = {
        'performed': Scan.objects.filter(assets__in=[asset]).count(),
        'defined': ScanDefinition.objects.filter(assets_list__in=[asset]).count(),
        'periodic': ScanDefinition.objects.filter(
            assets_list__in=[asset], scan_type='periodic').count(),
        'ondemand': ScanDefinition.objects.filter(
            assets_list__in=[asset], scan_type='single').count(),
        # TODO (bug): should be regrouped per asset.
        'running': Scan.objects.filter(
            assets__in=[asset], status='started').count(),
        'lasts': Scan.objects.filter(
            assets__in=[asset]).order_by('-updated_at')[:3]
    }

    asset_groups = list(
        AssetGroup.objects.for_user(
            request.user).filter(assets__in=[asset]).only("id"))
    scan_defs = ScanDefinition.objects.filter(
        Q(assets_list__in=[asset]) |
        Q(assetgroups_list__in=asset_groups)).annotate(
        engine_type_name=F('engine_type__name')).annotate(
        scan_set_count=Count('scan')).order_by('-updated_at')
    scans = Scan.objects.filter(assets__in=[asset]).values(
        "id", "title", "status", "summary", "updated_at").annotate(
        engine_type_name=F('engine_type__name')).order_by('-updated_at')

    # Investigation links: substitute the asset value into matching templates.
    investigation_links = []
    DEFAULT_LINKS = copy.deepcopy(ASSET_INVESTIGATION_LINKS)
    for i in DEFAULT_LINKS:
        if asset.type in i["datatypes"]:
            if "link" in i:
                i["link"] = i["link"].replace("%asset%", asset.value)
            investigation_links.append(i)

    # Calculate automatically risk grade
    asset.calc_risk_grade()
    asset_risk_grade = {
        'now': asset.get_risk_grade(),
        'day_ago': asset.get_risk_grade(history=1),
        'week_ago': asset.get_risk_grade(history=7),
        'month_ago': asset.get_risk_grade(history=30),
        'year_ago': asset.get_risk_grade(history=365)
    }

    return render(request, 'details-asset.html', {
        'asset': asset,
        'asset_risk_grade': asset_risk_grade,
        'findings': findings,
        'findings_stats': findings_stats,
        'references': references_cleaned,
        'scans_stats': scans_stats,
        'scans': scans,
        'scan_defs': scan_defs,
        'investigation_links': investigation_links,
        'engines_stats': engines_stats,
        'asset_scopes': list(engine_scopes.items())
    })
class AbstractSMSCampaign(CremeEntity):
    """Abstract base entity for an SMS campaign targeting messaging lists."""

    name = CharField(_('Name of the campaign'), max_length=100)
    lists = ManyToManyField(
        settings.SMS_MLIST_MODEL,
        verbose_name=_('Related messaging lists'), blank=True,
    )

    creation_label = pgettext_lazy('sms', 'Create a campaign')
    save_label = pgettext_lazy('sms', 'Save the campaign')

    class Meta:
        abstract = True
        # manager_inheritance_from_future = True
        app_label = 'sms'
        verbose_name = _('SMS campaign')
        verbose_name_plural = _('SMS campaigns')
        ordering = ('name',)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('sms__view_campaign', args=(self.id,))

    @staticmethod
    def get_create_absolute_url():
        return reverse('sms__create_campaign')

    def get_edit_absolute_url(self):
        return reverse('sms__edit_campaign', args=(self.id,))

    @staticmethod
    def get_lv_absolute_url():
        return reverse('sms__list_campaigns')

    def delete(self, *args, **kwargs):
        # Detach the messaging lists, then delete every sending owned by the
        # campaign before deleting the campaign itself.
        self.lists.clear()

        for sending in self.sendings.all():
            sending.delete(*args, **kwargs)

        super().delete(*args, **kwargs)

    # def all_recipients(self):
    def all_phone_numbers(self):
        """Return the set of phone numbers targeted by this campaign.

        Combines manually entered recipients with mobile numbers of the
        contacts in the (non-deleted) related messaging lists.
        """
        mlists = self.lists.filter(is_deleted=False)

        # Manual numbers
        numbers = set(
            Recipient.objects
                     .filter(messaging_list__in=mlists)
                     .values_list('phone', flat=True)
        )

        # Contacts number
        for mlist in mlists:
            for contact in mlist.contacts.filter(is_deleted=False):
                if contact.mobile:
                    numbers.add(contact.mobile)

        return numbers
class Ingredient(Model):
    """A recipe ingredient."""

    # Short display name of the ingredient.
    name=CharField(max_length=20)
    # True when the ingredient is a seasoning, False otherwise.
    type=BooleanField(default=False) #seasoning=True, else=False

    def __str__(self):
        return self.name
def termination_reports(request):
    """Render the terminated-employee report, filtered/sorted via GET params."""
    company = request.GET.get('company')
    date_range = request.GET.get('date_range')
    search = request.GET.get('search')
    sort_by = request.GET.get('sort_by')

    sort_choices = [
        ('', 'Sort By'),
        ('last_name', 'Last Name'),
        ('first_name', 'First Name'),
        ('employee_id', 'Employee ID'),
        ('position', 'Position'),
        ('hire_date', 'Hire Date'),
        ('-termination_date', 'Termination Date'),
    ]

    # date_range arrives as 'MM/DD/YYYY - MM/DD/YYYY'; default to the last
    # 365 days when absent.
    if date_range:
        start_date = datetime.datetime.strptime(date_range[:10], '%m/%d/%Y')
        end_date = datetime.datetime.strptime(date_range[13:], '%m/%d/%Y')
    else:
        start_date = datetime.datetime.today() - datetime.timedelta(days=365)
        end_date = datetime.datetime.today()

    termed_drivers = Employee.objects.filter(is_active=False)

    if search:
        try:
            search = int(search)
        except ValueError:
            # Non-numeric search terms match against the full name instead.
            termed_drivers = Employee.objects.annotate(
                full_name=Concat('first_name', V(' '), 'last_name',
                                 output_field=CharField()),
            ).filter(full_name__icontains=search, is_active=False)
        else:
            termed_drivers = termed_drivers.filter(employee_id__exact=search)

    if company:
        termed_drivers = termed_drivers.filter(
            company__display_name__exact=company)
    if start_date and end_date:
        termed_drivers = termed_drivers.filter(
            termination_date__gte=start_date, termination_date__lte=end_date)
    if sort_by:
        termed_drivers = termed_drivers.order_by(sort_by)

    f_form = FilterForm(
        sort_choices=sort_choices,
        data={
            'company': company,
            'date_range': date_range,
            'search': search,
            'sort_by': sort_by,
        },
    )

    paginator = Paginator(termed_drivers, 25)
    page_obj = paginator.get_page(request.GET.get('page'))

    context = {
        'page_obj': page_obj,
        'f_form': f_form,
        'start_date': start_date.strftime('%m/%d/%Y'),
        'end_date': end_date.strftime('%m/%d/%Y'),
    }
    return render(request, 'operations/termination_reports.html', context)
def get_queryset(self, base_queryset=RoundsAndLabsQueryset):
    """Build the base queryset of submittable forms with round/lab metadata.

    Annotates each row with its lead's name/pk (taken from whichever of the
    round or lab relation is set), its start/end dates, the parent path, and
    the title of the fund found at that parent path.
    """
    # Subquery target: the ApplicationBase whose path equals this row's
    # computed parent_path.
    funds = ApplicationBase.objects.filter(path=OuterRef('parent_path'))
    return base_queryset(self.model, using=self._db).type(SubmittableStreamForm).annotate(
        # Coalesce: exactly one of the round/lab relations is expected to
        # provide a value — TODO confirm.
        lead=Coalesce(
            F('roundbase__lead__full_name'),
            F('labbase__lead__full_name'),
        ),
        start_date=F('roundbase__start_date'),
        end_date=F('roundbase__end_date'),
        # parent_path: this row's `path` with the trailing
        # ApplicationBase.steplen characters removed.
        parent_path=Left(F('path'), Length('path') - ApplicationBase.steplen, output_field=CharField()),
        fund=Subquery(funds.values('title')[:1]),
        lead_pk=Coalesce(
            F('roundbase__lead__pk'),
            F('labbase__lead__pk'),
        ),
    )
def __init__(self, jsonb, path):
    """Wrap the parent expression, forcing text output for the given path."""
    path_expr = Value(path, output_field=CharField())
    super().__init__(jsonb, path_expr, output_field=CharField())
def attendance_reports(request):
    """Render the attendance report listing, filtered/sorted via GET params."""
    sort_by = request.GET.get('sort_by')
    reasons = request.GET.get('reasons')
    company_name = request.GET.get('company')
    date_range = request.GET.get('date_range')
    search = request.GET.get('search')
    # date_range arrives as 'MM/DD/YYYY - MM/DD/YYYY' — TODO confirm widget
    # format; defaults to the last 365 days when absent.
    start_date = datetime.datetime.strptime(date_range[:10], '%m/%d/%Y') if date_range else\
        (datetime.datetime.today() - datetime.timedelta(days=365))
    end_date = datetime.datetime.strptime(date_range[13:], '%m/%d/%Y') if date_range else\
        datetime.datetime.today()
    attendance_records = Attendance.objects.filter(is_active=True,
                                                   employee__is_active=True)
    sort_choices = [
        ('', 'Sort By'),
        ('employee__last_name', 'Last Name'),
        ('-incident_date', 'Incident Date'),
        ('employee__first_name', 'First Name'),
        ('employee_id', 'Employee ID'),
        ('-points', 'Points'),
        ('-total_points', 'Total Points'),
    ]
    if search:
        try:
            # Numeric search: exact employee-id match.
            search = int(search)
            attendance_records = attendance_records.filter(
                employee__employee_id=search)
        except ValueError:
            # Non-numeric search: substring match on the employee full name.
            attendance_records = Attendance.objects.annotate(
                full_name=Concat('employee__first_name', V(' '),
                                 'employee__last_name',
                                 output_field=CharField())).filter(
                full_name__icontains=search, is_active=True,
                employee__is_active=True)
    if sort_by == '-total_points':
        # Special case: total points are summed per employee full name, then
        # attached to each record via a correlated subquery matching on the
        # employee's first AND last name (icontains on the aggregated name).
        records = attendance_records.annotate(full_name=Concat(
            'employee__first_name', V(' '), 'employee__last_name',
            output_field=CharField())).values('full_name').annotate(
                total_points=Sum('points'))
        attendance_records = attendance_records.annotate(total_points=Subquery(
            records.filter(
                Q(full_name__icontains=OuterRef('employee__first_name')) &
                Q(full_name__icontains=OuterRef('employee__last_name'))).
            values('total_points')[:1]))
        attendance_records = attendance_records.order_by('-total_points')
    elif sort_by:
        attendance_records = attendance_records.order_by(sort_by)
    if reasons:
        attendance_records = attendance_records.filter(reason__exact=reasons)
    if company_name:
        attendance_records = attendance_records.filter(
            employee__company__display_name=company_name)
    if start_date and end_date:
        attendance_records = attendance_records.filter(
            incident_date__gte=start_date, incident_date__lte=end_date)
    f_form = AttendanceFilterForm(data={
        'sort_by': sort_by,
        'reasons': reasons,
        'company': company_name,
        'date_range': date_range,
        'search': search
    }, sort_choices=sort_choices)
    page = request.GET.get('page')
    paginator = Paginator(attendance_records, 25)
    page_obj = paginator.get_page(page)
    data = {
        'page_obj': page_obj,
        'f_form': f_form,
        'start_date': start_date.strftime('%m/%d/%Y'),
        'end_date': end_date.strftime('%m/%d/%Y'),
    }
    return render(request, 'operations/attendance_reports.html', data)
def __init__(self, *args, **kwargs):
    """CharField preconfigured for a unique identity number (9999[12]9999)."""
    defaults = {
        'editable': True,
        'unique': True,
        'max_length': 9,
        'help_text': _('Format is 9999[12]9999'),
    }
    for option, value in defaults.items():
        kwargs.setdefault(option, value)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, *args, **kwargs):
    """CharField preconfigured for marking which part of a date is estimated."""
    defaults = {
        'editable': True,
        'max_length': 25,
        'choices': DATE_ESTIMATED,
        'help_text': _('If the exact date is not known, please indicate which part of the date is estimated.'),
    }
    for option, value in defaults.items():
        kwargs.setdefault(option, value)
    CharField.__init__(self, *args, **kwargs)
class Author(Model):
    """An author, with public names in Persian and in the default script."""

    # Public name written in Persian.
    publicPersianName = CharField(max_length=NAME_MAX_LENGTH)
    # Public name used for display (see __str__).
    publicName = CharField(max_length=NAME_MAX_LENGTH)

    def __str__(self):
        return self.publicName
class ResourcePool(CleanSave, TimestampedModel):
    """A resource pool."""

    objects = ResourcePoolManager()

    # Unique pool name, validated by MODEL_NAME_VALIDATOR.
    name = CharField(max_length=256, unique=True, editable=True,
                     validators=[MODEL_NAME_VALIDATOR])
    description = TextField(null=False, blank=True, editable=True)

    class Meta(DefaultMeta):
        ordering = ['name']

    def __str__(self):
        return self.name

    def is_default(self):
        """Whether this is the default pool."""
        # The default pool is identified by the fixed id 0.
        return self.id == 0

    def delete(self):
        """Delete the pool and its backing role.

        Raises ValidationError when the pool is the default pool or still
        contains machines.
        """
        if self.is_default():
            raise ValidationError(
                'This is the default pool, it cannot be deleted.')
        if self.node_set.exists():
            raise ValidationError(
                'Pool has machines in it, it cannot be deleted.')
        # NOTE(review): _get_pool_role() returns role_set.first(), which can
        # be None; that would raise AttributeError here — confirm a role
        # always exists for every pool.
        self._get_pool_role().delete()
        super().delete()

    def grant_user(self, user):
        """Grant user access to the resource pool.

        XXX This should be dropped once we implement full RBAC, and the
        logic moved to methods in Role.

        """
        role = self._get_pool_role()
        role.users.add(user)

    def revoke_user(self, user):
        """Revoke user access to the resource pool.

        Raises ValidationError when the user still owns machines in the pool.

        XXX This should be dropped once we implement full RBAC, and the
        logic moved to methods in Role.

        """
        # Local import to avoid a circular dependency with the node model.
        from maasserver.models.node import Machine
        if Machine.objects.filter(pool=self, owner=user).exists():
            raise ValidationError(
                'User has machines in the pool, it cannot be revoked.')
        role = self._get_pool_role()
        role.users.remove(user)

    def _get_pool_role(self):
        """Return the Role associated to the pool.

        Until full RBAC is implemented, each ResourcePool is assigned to a
        single role.
        """
        return self.role_set.first()
def counseling_reports(request):
    """Render the counseling report listing, filtered/sorted via GET params."""
    sort_by = request.GET.get('sort_by')
    action_type = request.GET.get('action_type')
    company_name = request.GET.get('company')
    date_range = request.GET.get('date_range')
    search = request.GET.get('search')

    sort_choices = [
        ('', 'Sort By'),
        ('employee__last_name', 'Last Name'),
        ('-issued_date', 'Issued Date'),
        ('employee__first_name', 'First Name'),
        ('employee_id', 'Employee ID'),
    ]

    # date_range arrives as 'MM/DD/YYYY - MM/DD/YYYY'; default to the last
    # 30 days when absent.
    if date_range:
        start_date = datetime.datetime.strptime(date_range[:10], '%m/%d/%Y')
        end_date = datetime.datetime.strptime(date_range[13:], '%m/%d/%Y')
    else:
        start_date = datetime.datetime.today() - datetime.timedelta(days=30)
        end_date = datetime.datetime.today()

    counseling_records = Counseling.objects.filter(
        is_active=True, employee__is_active=True)

    if search:
        try:
            search = int(search)
        except ValueError:
            # Non-numeric search terms match against the full name instead.
            counseling_records = Counseling.objects.annotate(
                full_name=Concat('employee__first_name', V(' '),
                                 'employee__last_name',
                                 output_field=CharField()),
            ).filter(full_name__icontains=search,
                     employee__is_active=True, is_active=True)
        else:
            counseling_records = counseling_records.filter(
                employee__employee_id=search)

    if sort_by:
        counseling_records = counseling_records.order_by(sort_by)
    if action_type:
        counseling_records = counseling_records.filter(
            action_type__exact=action_type)
    if company_name:
        counseling_records = counseling_records.filter(
            employee__company__display_name=company_name)
    if start_date and end_date:
        counseling_records = counseling_records.filter(
            issued_date__gte=start_date, issued_date__lte=end_date)

    f_form = CounselingFilterForm(
        sort_choices=sort_choices,
        data={
            'sort_by': sort_by,
            'action_type': action_type,
            'company': company_name,
            'date_range': date_range,
            'search': search,
        },
    )

    paginator = Paginator(counseling_records, 25)
    page_obj = paginator.get_page(request.GET.get('page'))

    context = {
        'page_obj': page_obj,
        'f_form': f_form,
        'start_date': start_date.strftime('%m/%d/%Y'),
        'end_date': end_date.strftime('%m/%d/%Y'),
    }
    return render(request, 'operations/counseling_reports.html', context)
def __init__(self, *args, **kwargs):
    """CharField preconfigured for an 8-digit mobile phone number."""
    defaults = {
        'verbose_name': _('Mobile'),
        'editable': True,
        'max_length': 8,
        'help_text': _('The format is 99999999'),
    }
    for option, value in defaults.items():
        kwargs.setdefault(option, value)
    CharField.__init__(self, *args, **kwargs)
def __init__(self, expression, distinct=False, **extra):
    """Concat aggregate yielding text; `distinct` adds a DISTINCT prefix."""
    distinct_sql = 'DISTINCT ' if distinct else ''
    super(Concat, self).__init__(
        expression,
        distinct=distinct_sql,
        output_field=CharField(),
        **extra
    )
class BazarToken(Model):
    """Stored token credentials (access/refresh pair plus token type)."""

    # NOTE(review): max_length on a TextField is not enforced at the database
    # level by Django (it only affects the auto-generated form widget);
    # consider CharField if a hard limit is intended.
    accessToken = TextField(max_length=255, null=False, blank=False)
    refreshToken = CharField(max_length=255, null=False, blank=False)
    # e.g. token scheme identifier — TODO confirm expected values.
    tokenType = CharField(max_length=10, null=False, blank=False)
class Enzyme(BioMolecule):
    """A biomolecule with an EC (Enzyme Commission) number."""

    # EC number, e.g. '1.1.1.1'; optional — TODO confirm format validation
    # happens elsewhere.
    ec = CharField(max_length=255, blank=True, default='',
                   verbose_name='ec-number')
def __init__(self, provider, report_type):
    """Constructor.

    Builds the static OCP-on-AWS provider mapping: per report type
    ('costs', 'storage', 'instance_type' and their *_by_project variants),
    the aggregates/annotations, filters, group-by options and unit
    fallbacks used by the query handler, then delegates to the parent.
    """
    self._mapping = [
        {
            'provider': 'OCP_AWS',
            'alias': 'account_alias__account_alias',
            # Query-param name -> model column.
            'annotations': {
                'cluster': 'cluster_id',
                'project': 'namespace',
                'account': 'usage_account_id',
                'service': 'product_code',
                'az': 'availability_zone'
            },
            'end_date': 'usage_end',
            'filters': {
                'project': {
                    'field': 'namespace',
                    'operation': 'icontains'
                },
                # Filters sharing a composition_key are OR'ed together.
                'cluster': [
                    {
                        'field': 'cluster_alias',
                        'operation': 'icontains',
                        'composition_key': 'cluster_filter'
                    },
                    {
                        'field': 'cluster_id',
                        'operation': 'icontains',
                        'composition_key': 'cluster_filter'
                    }
                ],
                'node': {
                    'field': 'node',
                    'operation': 'icontains'
                },
                'account': [
                    {
                        'field': 'account_alias__account_alias',
                        'operation': 'icontains',
                        'composition_key': 'account_filter'
                    },
                    {
                        'field': 'usage_account_id',
                        'operation': 'icontains',
                        'composition_key': 'account_filter'
                    }
                ],
                'service': {
                    'field': 'product_code',
                    'operation': 'icontains'
                },
                'product_family': {
                    'field': 'product_family',
                    'operation': 'icontains'
                },
                'az': {
                    'field': 'availability_zone',
                    'operation': 'icontains'
                },
                'region': {
                    'field': 'region',
                    'operation': 'icontains'
                }
            },
            'group_by_options': ['account', 'service', 'region', 'cluster',
                                 'project', 'node', 'product_family'],
            'tag_column': 'tags',
            'report_type': {
                # Cluster-level costs (unblended + markup).
                'costs': {
                    'aggregates': {
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum(F('unblended_cost')),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                    },
                    'annotations': {
                        # Cost is the first column in annotations so that it
                        # can reference the original database column
                        # 'markup_cost'. If cost comes after the markup_cost
                        # annotation, then Django will reference the annotated
                        # value, which is a Sum() and things will break trying
                        # to add a column with the sum of another column.
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum(F('unblended_cost')),
                        'derived_cost': Value(0, output_field=DecimalField()),
                        'markup_cost': Sum(
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD'))
                    },
                    'count': None,
                    'delta_key': {
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        )
                    },
                    'filter': [{}],
                    'cost_units_key': 'currency_code',
                    'cost_units_fallback': 'USD',
                    'sum_columns': ['cost', 'infrastructure_cost',
                                    'derived_cost', 'markup_cost'],
                    'default_ordering': {'cost': 'desc'},
                },
                # Project-level costs use pod_cost/project_markup_cost from
                # the per-project summary table.
                'costs_by_project': {
                    'tables': {
                        'query': OCPAWSCostLineItemProjectDailySummary,
                        'total': OCPAWSCostLineItemProjectDailySummary
                    },
                    'aggregates': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum('pod_cost'),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                    },
                    'annotations': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum('pod_cost'),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD'))
                    },
                    'count': None,
                    'delta_key': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        )
                    },
                    'filter': [{}],
                    'cost_units_key': 'currency_code',
                    'cost_units_fallback': 'USD',
                    'sum_columns': ['infrastructure_cost', 'markup_cost',
                                    'derived_cost', 'cost'],
                    'default_ordering': {'cost': 'desc'},
                },
                # Storage usage/cost, restricted to the 'Storage' product
                # family by the static filter below.
                'storage': {
                    'aggregates': {
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum(F('unblended_cost')),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'usage': Sum(F('usage_amount')),
                        'usage_units': Coalesce(Max('unit'), Value('GB-Mo'))
                    },
                    'annotations': {
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum(F('unblended_cost')),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'usage': Sum(F('usage_amount')),
                        'usage_units': Coalesce(Max('unit'), Value('GB-Mo'))
                    },
                    'count': None,
                    'delta_key': {'usage': Sum('usage_amount')},
                    'filter': [{
                        'field': 'product_family',
                        'operation': 'contains',
                        'parameter': 'Storage'
                    }, ],
                    'cost_units_key': 'currency_code',
                    'cost_units_fallback': 'USD',
                    'usage_units_key': 'unit',
                    'usage_units_fallback': 'GB-Mo',
                    'sum_columns': ['usage', 'infrastructure_cost',
                                    'markup_cost', 'derived_cost', 'cost'],
                    'default_ordering': {'usage': 'desc'},
                },
                'storage_by_project': {
                    'tables': {
                        'query': OCPAWSCostLineItemProjectDailySummary,
                        'total': OCPAWSCostLineItemProjectDailySummary
                    },
                    'aggregates': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum('pod_cost'),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'usage': Sum('usage_amount'),
                        'usage_units': Coalesce(Max('unit'), Value('GB-Mo'))
                    },
                    'annotations': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum('pod_cost'),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'usage': Sum('usage_amount'),
                        'usage_units': Coalesce(Max('unit'), Value('GB-Mo'))
                    },
                    'count': None,
                    'delta_key': {'usage': Sum('usage_amount')},
                    'filter': [{
                        'field': 'product_family',
                        'operation': 'contains',
                        'parameter': 'Storage'
                    }, ],
                    'cost_units_key': 'currency_code',
                    'cost_units_fallback': 'USD',
                    'usage_units_key': 'unit',
                    'usage_units_fallback': 'GB-Mo',
                    'sum_columns': ['usage', 'cost', 'infrastructure_cost',
                                    'derived_cost', 'markup_cost'],
                    'default_ordering': {'usage': 'desc'},
                },
                # Instance usage, restricted to rows with an instance_type.
                # NOTE(review): aggregates fall back to 'GB-Mo' usage units
                # while annotations fall back to 'Hrs' — confirm this
                # inconsistency is intentional.
                'instance_type': {
                    'aggregates': {
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum(F('unblended_cost')),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'count': Count('resource_id', distinct=True),
                        'usage': Sum(F('usage_amount')),
                        'usage_units': Coalesce(Max('unit'), Value('GB-Mo'))
                    },
                    'aggregate_key': 'usage_amount',
                    'annotations': {
                        'cost': Sum(
                            Coalesce(F('unblended_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum(F('unblended_cost')),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'count': Count('resource_id', distinct=True),
                        'count_units': Value('instances', output_field=CharField()),
                        'usage': Sum(F('usage_amount')),
                        'usage_units': Coalesce(Max('unit'), Value('Hrs'))
                    },
                    'count': 'resource_id',
                    'delta_key': {'usage': Sum('usage_amount')},
                    'filter': [{
                        'field': 'instance_type',
                        'operation': 'isnull',
                        'parameter': False
                    }, ],
                    'group_by': ['instance_type'],
                    'cost_units_key': 'currency_code',
                    'cost_units_fallback': 'USD',
                    'usage_units_key': 'unit',
                    'usage_units_fallback': 'Hrs',
                    'count_units_fallback': 'instances',
                    'sum_columns': ['usage', 'cost', 'infrastructure_cost',
                                    'markup_cost', 'derived_cost', 'count'],
                    'default_ordering': {'usage': 'desc'},
                },
                'instance_type_by_project': {
                    'tables': {
                        'query': OCPAWSCostLineItemProjectDailySummary,
                        'total': OCPAWSCostLineItemProjectDailySummary
                    },
                    'aggregates': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum('pod_cost'),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'count': Count('resource_id', distinct=True),
                        'usage': Sum('usage_amount'),
                        'usage_units': Coalesce(Max('unit'), Value('GB-Mo'))
                    },
                    'aggregate_key': 'usage_amount',
                    'annotations': {
                        'cost': Sum(
                            Coalesce(F('pod_cost'), Value(0, output_field=DecimalField())) +
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'infrastructure_cost': Sum('pod_cost'),
                        'derived_cost': Sum(Value(0, output_field=DecimalField())),
                        'markup_cost': Sum(
                            Coalesce(F('project_markup_cost'), Value(0, output_field=DecimalField()))
                        ),
                        'cost_units': Coalesce(Max('currency_code'), Value('USD')),
                        'count': Count('resource_id', distinct=True),
                        'count_units': Value('instances', output_field=CharField()),
                        'usage': Sum('usage_amount'),
                        'usage_units': Coalesce(Max('unit'), Value('Hrs'))
                    },
                    'count': 'resource_id',
                    'delta_key': {'usage': Sum('usage_amount')},
                    'filter': [{
                        'field': 'instance_type',
                        'operation': 'isnull',
                        'parameter': False
                    }, ],
                    'group_by': ['instance_type'],
                    'cost_units_key': 'currency_code',
                    'cost_units_fallback': 'USD',
                    'usage_units_key': 'unit',
                    'usage_units_fallback': 'Hrs',
                    'count_units_fallback': 'instances',
                    'sum_columns': ['usage', 'cost', 'infrastructure_cost',
                                    'markup_cost', 'derived_cost', 'count'],
                    'default_ordering': {'usage': 'desc'},
                },
            },
            'start_date': 'usage_start',
            'tables': {
                'query': OCPAWSCostLineItemDailySummary,
                'total': OCPAWSCostLineItemDailySummary
            },
        }
    ]
    super().__init__(provider, report_type)
class Color(Model):
    """A named color."""

    # Color name; optional, empty by default.
    name = CharField(max_length=255, blank=True, default='',
                     verbose_name='color')
class Order(Model):
    """A customer order: line items plus payment/status workflow."""

    ORDER_STATUSES = (('P', 'Pending'), ('A', 'Processing'), ('S', 'Shipped'),
                      ('C', 'Cancelled'))

    date_ordered = DateTimeField(auto_now=True)
    customer = ForeignKey(Customer, on_delete=CASCADE)
    status = CharField(max_length=2, choices=ORDER_STATUSES, default='P')
    customer_deposit_photo = FileField(blank=True, null=True, default=None)
    customer_payment_date = DateField(null=True, blank=True, default=None)
    payment_verified = BooleanField(default=False)
    queue_id = CharField(null=True, max_length=64)

    @staticmethod
    def print_orders_containing_product(product):
        """Print every order (and its line items) that contains `product`."""
        matching = [
            order for order in Order.objects.all()
            if order.has_product(product)
        ]
        for order in matching:
            print(f"Order #{order.id}")
            for line_item in order.orderlineitems_set.all():
                print(line_item.product.name)
            print()

    @property
    def total_price(self):
        """Sum of all line-item prices, as a float."""
        return sum(
            (float(item.line_price) for item in self.orderlineitems_set.all()),
            0.00,
        )

    def submit_customer_payment(self, deposit_photo, payment_date):
        """Record the customer's deposit and move the order to Processing."""
        self.customer_deposit_photo = deposit_photo
        self.customer_payment_date = payment_date
        self.status = 'A'
        self.save()

    def approve_customer_payment(self):
        """Mark the payment as verified and keep the order in Processing."""
        self.status = 'A'  # Change to Processing
        self.payment_verified = True
        self.save()

    def reject_customer_payment(self):
        """Discard the submitted payment and return the order to Pending."""
        self.customer_deposit_photo = None
        self.customer_payment_date = None
        self.status = 'P'
        self.save()

    def accept_customer_payment(self):
        self.payment_verified = True
        self.save()

    def cancel(self):
        """Cancel the order, stop its queued task and restock its items."""
        self.status = 'C'
        app.control.revoke(self.queue_id, terminate=True)

        # Return product to inventory
        for item in self.orderlineitems_set.all():
            product = item.product
            product.quantity += item.quantity
            product.save()

        self.save()

    def has_products(self, *products):
        """True when every given product appears in this order."""
        return all(self.has_product(product) for product in products)

    def has_product(self, product):
        """True when any line item of this order references `product`."""
        return any(
            item.product == product
            for item in self.orderlineitems_set.all()
        )
class Class(Biddable, Event):
    '''
    A Class is an Event where one or a few people teach/instruct/guide/mediate
    and a number of participants spectate/participate.
    '''
    teacher = ForeignKey(Persona,
                         on_delete=CASCADE,
                         related_name='is_teaching')
    minimum_enrollment = IntegerField(blank=True, default=1)
    maximum_enrollment = IntegerField(blank=True, default=20, null=True)
    organization = CharField(max_length=128, blank=True)
    type = CharField(max_length=128,
                     choices=class_options,
                     blank=True,
                     default="Lecture")
    fee = IntegerField(blank=True, default=0, null=True)
    other_teachers = CharField(max_length=128, blank=True)
    length_minutes = IntegerField(choices=class_length_options,
                                  default=60,
                                  blank=True)
    history = TextField(blank=True)
    run_before = TextField(blank=True)
    schedule_constraints = TextField(blank=True)
    avoided_constraints = TextField(blank=True)
    space_needs = CharField(max_length=128,
                            choices=space_options,
                            blank=True,
                            default='')
    physical_restrictions = TextField(blank=True)
    multiple_run = CharField(max_length=20,
                             choices=yesno_options,
                             default="No")

    def clone(self):
        # Copy this class's teaching/bid fields onto a fresh Class whose
        # event and bid are attached to the next upcoming conference.
        # NOTE(review): maximum_enrollment, schedule_constraints and
        # avoided_constraints are NOT copied — confirm this is intentional.
        new_class = Class()
        new_class.teacher = self.teacher
        new_class.minimum_enrollment = self.minimum_enrollment
        new_class.organization = self.organization
        new_class.type = self.type
        new_class.fee = self.fee
        new_class.other_teachers = self.other_teachers
        new_class.length_minutes = self.length_minutes
        new_class.history = self.history
        new_class.run_before = self.run_before
        new_class.space_needs = self.space_needs
        new_class.physical_restrictions = self.physical_restrictions
        new_class.multiple_run = self.multiple_run
        new_class.duration = self.duration
        new_class.e_title = self.e_title
        new_class.e_description = self.e_description
        new_class.e_conference = Conference.objects.filter(
            status="upcoming").first()
        new_class.b_title = self.b_title
        new_class.b_description = self.b_description
        new_class.b_conference = Conference.objects.filter(
            status="upcoming").first()
        new_class.save()
        return new_class

    @property
    def get_space_needs(self):
        # Resolve the stored space_needs key to its human-readable
        # "top level - sub level" label from the nested space_options.
        needs = ""
        for top, top_opts in space_options:
            for key, sub_level in top_opts:
                if key == self.space_needs:
                    needs = top + " - " + sub_level
        return needs

    @property
    def bid_review_header(self):
        # Column headers matching bid_review_summary plus review/action cells.
        return (['Title',
                 'Teacher',
                 'Type',
                 'Last Update',
                 'State',
                 'Reviews',
                 'Action'])

    @property
    def bid_review_summary(self):
        return [self.b_title,
                self.teacher,
                self.type,
                self.updated_at.strftime(GBE_TABLE_FORMAT),
                acceptance_states[self.accepted][1]]

    @property
    def profile(self):
        return self.teacher.contact

    def __str__(self):
        # Prefer the event title; fall back to the bid title.
        if self.e_title and len(self.e_title) > 0:
            return self.e_title
        return self.b_title

    # tickets that apply to class are:
    #   - any ticket that applies to "most"
    #   - any ticket that applies to the conference
    #   - any ticket that links this event specifically
    # but for all tickets - iff the ticket is active
    #
    def get_tickets(self):
        return get_tickets(self,
                           most=True,
                           conference=True)

    class Meta:
        verbose_name_plural = 'classes'
        app_label = "gbe"
class Course(Model):
    """A course running along a route, keyed by its course code."""

    route = ForeignKey('Route', on_delete=CASCADE)
    code = CharField(primary_key=True, max_length=20)

    def __str__(self):
        # The primary-key code doubles as the display name.
        return '%s' % self.code
class VersionedTextFile(CleanSave, TimestampedModel):
    """An immutable `TextFile` which keeps track of its previous versions.

    :ivar data: The data belonging to this TextFile.
    :ivar previous_version: Optional previous version of this file.
    """

    class Meta(DefaultMeta):
        """Needed for South to recognize this model."""

        verbose_name = "VersionedTextFile"
        verbose_name_plural = "VersionedTextFiles"

    # Link to the version this one superseded. CASCADE on this FK means
    # deleting an older version also deletes every row that references it
    # (i.e. all versions derived from it) — revert() relies on this.
    previous_version = ForeignKey("self",
                                  on_delete=CASCADE,
                                  default=None,
                                  blank=True,
                                  null=True,
                                  editable=True,
                                  related_name="next_versions")

    # File contents; editable=False because saved versions are immutable
    # (enforced by clean()).
    data = TextField(editable=False,
                     blank=True,
                     null=True,
                     help_text="File contents")

    # Free-text description attached to a single version.
    comment = CharField(editable=True,
                        max_length=255,
                        blank=True,
                        null=True,
                        unique=False,
                        help_text="Description of this version")

    def update(self, new_data, comment=None):
        """Updates this `VersionedTextFile` with the specified `new_data`
        and returns a newly-created `VersionedTextFile`. If the file
        has changed, it will be updated with the specified `comment`,
        if supplied.
        """
        # Identical content creates no new version; the current object is
        # returned unchanged.
        if new_data == self.data:
            return self
        else:
            updated = VersionedTextFile(previous_version_id=self.id,
                                        data=new_data,
                                        comment=comment)
            updated.save()
            return updated

    def clean(self):
        """Reject modification of an already-saved version."""
        # A non-None id means this row already exists in the database, so
        # any save would be an in-place edit — forbidden by design.
        if self.id is not None:
            raise ValidationError("VersionedTextFile contents are immutable.")

    def get_oldest_version(self):
        """Walk the previous_version chain back to its root version."""
        oldest_known = self
        while oldest_known.previous_version is not None:
            oldest_known = oldest_known.previous_version
        return oldest_known

    def revert(self, to, gc=True, gc_hook=None):
        """Return a VersionTextFile object in this objects history.

        The returned object is specified by the VersionTextFile id or a
        negative number with how far back to go. By default newer objects
        then the one returned will be removed. You can optionally provide
        a garbage collection hook which accepts a single parameter being
        the value being reverted to. This allows you to revert a value
        and do garbage collection when the foreign key is set to cascade.
        """
        if to == 0:
            # Reverting zero steps is a no-op.
            return self
        elif to < 0:
            # Negative `to` counts steps back from self (history[0] is
            # self, history[1] its previous version, and so on).
            history = [textfile for textfile in self.previous_versions()]
            to = -to
            if to >= len(history):
                raise ValueError("Goes too far back.")
            if gc:
                if gc_hook is not None:
                    gc_hook(history[to])
                # Deleting the immediate successor of the target removes
                # every newer version too, via the CASCADE on
                # previous_version.
                history[to - 1].delete()
            return history[to]
        else:
            # Positive `to` is interpreted as a VersionedTextFile id;
            # search the chain for it, remembering the next-newer version
            # so it can be deleted (cascading away all newer versions).
            next_textfile = None
            for textfile in self.previous_versions():
                if textfile.id == to:
                    if next_textfile is not None and gc:
                        if gc_hook is not None:
                            gc_hook(textfile)
                        next_textfile.delete()
                    return textfile
                next_textfile = textfile
            raise ValueError("%s not found in history" % to)

    def previous_versions(self):
        """Return an iterator of this object and all previous versions."""

        class VersionedTextFileIterator():
            # Iterator that walks previous_version links starting at (and
            # including) the given textfile.
            def __init__(self, textfile):
                self.textfile = textfile

            def __iter__(self):
                return self

            def __next__(self):
                textfile = self.textfile
                if textfile is None:
                    raise StopIteration
                else:
                    self.textfile = self.textfile.previous_version
                return textfile

        return VersionedTextFileIterator(self)
class TransportMode(Model):
    """A mode of transport keyed by a numeric code."""

    # Human-readable mode name; optional (may be NULL/blank in the DB).
    mode = CharField(max_length=20, blank=True, null=True)
    # Stable numeric identifier, used as the primary key.
    code = IntegerField(primary_key=True)

    def __str__(self):
        # ``mode`` is nullable; returning None from __str__ raises
        # ``TypeError: __str__ returned non-string``, so fall back to "".
        return self.mode or ''
def PopulateQuery(self,
                  columnKey,
                  ValueKey,
                  criteria=CriteriaSearch.Like,
                  typeofData=DataType.VarChar):
    """Build a queryset of goods filtered on ``columnKey`` by ``criteria``.

    :param columnKey: model field name to filter on.
    :param ValueKey: value (or single value wrapped in a list for the
        ``In``/``NotIn`` criteria) to compare against.
    :param criteria: a ``CriteriaSearch`` member selecting the lookup.
    :param typeofData: ``DataType`` hint forwarded to ``ResolveCriteria``
        for the Beetween/BeginWith/EndWith criteria.
    :return: for ``Equal``, a ``values_list`` of goods columns; otherwise a
        ``values`` queryset annotated with a human-readable
        ``typeofdepreciation`` label.
    """

    def _with_depreciation(qs):
        # Shared projection: spell out the depreciation-method code and
        # select the standard goods columns. (The original duplicated
        # this annotate/values block verbatim.)
        return qs.annotate(typeofdepreciation=Case(
            When(depreciationmethod__iexact='SL', then=Value('Stright Line')),
            When(depreciationmethod__iexact='DDB',
                 then=Value('Double Declining Balance')),
            When(depreciationmethod__iexact='STYD',
                 then=Value('Sum of The Year Digit')),
            When(depreciationmethod__iexact='SH',
                 then=Value('Service Hours')),
            output_field=CharField())).values(
                'idapp', 'itemcode', 'goodsname', 'brandname', 'typeapp',
                'priceperunit', 'typeofdepreciation', 'unit', 'economiclife',
                'placement', 'descriptions', 'inactive', 'createdby',
                'createddate')

    base = super(NA_BR_Goods, self).get_queryset()
    # Default lookup is an exact match on the column itself. (The original
    # initialised this to ``columnKey + '='``, which is not a valid
    # queryset kwarg and made the Equal branch raise FieldError.)
    filterfield = columnKey

    if criteria == CriteriaSearch.NotEqual or criteria == CriteriaSearch.NotIn:
        if criteria == CriteriaSearch.NotIn:
            filterfield = columnKey + '__in'
        else:
            filterfield = columnKey + '__iexact'
        # Return the excluded queryset here. The original discarded the
        # result of ``annotate`` and then fell through, overwriting the
        # ``exclude`` queryset with a plain ``filter`` — inverting the
        # NotEqual/NotIn semantics.
        return _with_depreciation(base.exclude(**{filterfield: [ValueKey]}))

    if criteria == CriteriaSearch.Equal:
        # Fixed column name 'typeap' -> 'typeapp' (the spelling used by
        # every other projection in this method).
        return base.filter(**{
            filterfield: ValueKey
        }).values_list('itemcode', 'goodsname', 'brandname', 'typeapp',
                       'priceperunit', 'depreciationmethod', 'unit',
                       'economiclife', 'placement', 'descriptions',
                       'inactive')
    elif criteria == CriteriaSearch.Greater:
        filterfield = columnKey + '__gt'
    elif criteria == CriteriaSearch.GreaterOrEqual:
        filterfield = columnKey + '__gte'
    elif criteria == CriteriaSearch.In:
        filterfield = columnKey + '__in'
    elif criteria == CriteriaSearch.Less:
        filterfield = columnKey + '__lt'
    elif criteria == CriteriaSearch.LessOrEqual:
        filterfield = columnKey + '__lte'
    elif criteria == CriteriaSearch.Like:
        filterfield = columnKey + '__icontains'

    # ``__in`` lookups need a list; everything else takes the raw value.
    NAData = base.filter(
        **{
            filterfield: [ValueKey] if filterfield ==
            (columnKey + '__in') else ValueKey
        })
    if criteria == CriteriaSearch.Beetween or \
            criteria == CriteriaSearch.BeginWith or \
            criteria == CriteriaSearch.EndWith:
        rs = ResolveCriteria(criteria, typeofData, columnKey, ValueKey)
        # Fixed stray ``S`` in ``filter(S**rs.DefaultModel())``, which
        # raised NameError at runtime.
        NAData = base.filter(**rs.DefaultModel())
    return _with_depreciation(NAData)
def list_assets_view(request):
    """List assets.

    Renders 'list-assets.html' with the user's (optionally team-scoped)
    assets, filtered, sorted and paginated from GET parameters, plus the
    matching asset groups, tags and owners.
    """
    # Check team (PRO edition only)
    teamid_selected = -1
    if settings.PRO_EDITION is True and request.GET.get(
            'team', '').isnumeric() and int(request.GET.get('team', -1)) >= 0:
        teamid = int(request.GET.get('team'))
        # @Todo: ensure the team is allowed for this user
        teamid_selected = teamid

    teams = []
    if settings.PRO_EDITION and request.user.is_superuser:
        teams = Team.objects.all().order_by('name')
    elif settings.PRO_EDITION and not request.user.is_superuser:
        for tu in TeamUser.objects.filter(user=request.user):
            teams.append({
                'id': tu.organization.id,
                'name': tu.organization.name
            })

    # Check sorting options: only whitelisted field names are accepted.
    allowed_sort_options = [
        "id", "name", "criticity_num", "score", "type", "updated_at",
        "risk_level", "risk_level__grade", "-id", "-name", "-criticity_num",
        "-score", "-type", "-updated_at", "-risk_level", "-risk_level__grade"
    ]
    sort_options = request.GET.get("sort", "-updated_at")
    sort_options_valid = []
    for s in sort_options.split(","):
        if s in allowed_sort_options and s not in sort_options_valid:
            sort_options_valid.append(str(s))

    # Check Filtering options ('null' is treated the same as absent).
    filter_name = request.GET.get("filter_name", "")
    filter_type = request.GET.get("filter_type", "")
    filter_criticity = request.GET.get("filter_criticity", "")
    filter_tag = request.GET.get("filter_tag", "")

    if teamid_selected >= 0:
        assets_list = Asset.objects.for_team(request.user,
                                             teamid_selected).all()
    else:
        assets_list = Asset.objects.for_user(request.user).all()

    filters = Q()
    if filter_name and filter_name != 'null':
        filter_name = filter_name.split(',')
        for term in filter_name:
            filters |= (Q(name__icontains=term)
                        | Q(description__icontains=term))
    if filter_type and filter_type != 'null':
        filters &= Q(type=filter_type)
    if filter_criticity and filter_criticity != 'null':
        filters &= Q(criticity=filter_criticity)
    if filter_tag and filter_tag != 'null':
        filter_tag = filter_tag.split(',')
        # https://stackoverflow.com/questions/25831081/django-orm-dynamically-add-multiple-conditions-for-manytomanyfield
        for tag in filter_tag:
            assets_list = assets_list.filter(categories__value=tag)

    # Query. The original duplicated this identical annotate/order_by in
    # both branches of an ``if teamid_selected >= 0`` test; merged here.
    assets_list = assets_list.filter(filters).annotate(criticity_num=Case(
        When(criticity="high", then=Value("1")),
        When(criticity="medium", then=Value("2")),
        When(criticity="low", then=Value("3")),
        default=Value("1"),
        output_field=CharField())).annotate(
            cat_list=ArrayAgg('categories__value')).order_by(
                *sort_options_valid)

    # Pagination assets
    nb_rows = int(request.GET.get('n', 20))
    assets_paginator = Paginator(assets_list, nb_rows)
    page = request.GET.get('page')
    try:
        assets = assets_paginator.page(page)
    except PageNotAnInteger:
        assets = assets_paginator.page(1)
    except EmptyPage:
        assets = assets_paginator.page(assets_paginator.num_pages)

    # List asset groups
    asset_groups = []
    if teamid_selected >= 0:
        ags = AssetGroup.objects.for_team(
            request.user, teamid_selected).all().annotate(
                asset_list=ArrayAgg('assets__value')).only(
                    "id", "name", "assets", "criticity", "updated_at",
                    "risk_level", "teams")
    else:
        ags = AssetGroup.objects.for_user(request.user).all().annotate(
            asset_list=ArrayAgg('assets__value')).only("id", "name", "assets",
                                                       "criticity",
                                                       "updated_at",
                                                       "risk_level", "teams")
    for asset_group in ags.order_by(Lower("name")):
        # ArrayAgg over an empty relation yields [None]; show "" instead.
        assets_names = ""
        if asset_group.asset_list != [None]:
            assets_names = ", ".join(asset_group.asset_list)
        ag = {
            "id": asset_group.id,
            "name": asset_group.name,
            "criticity": asset_group.criticity,
            "updated_at": asset_group.updated_at,
            "assets_names": assets_names,
            "risk_grade": asset_group.risk_level['grade'],
            "teams": asset_group.teams
        }
        asset_groups.append(ag)

    tags = assets_list.values_list(
        'categories__value',
        flat=True).order_by('categories__value').distinct()
    owners = AssetOwner.objects.all()

    return render(
        request, 'list-assets.html', {
            'assets': assets,
            'asset_groups': asset_groups,
            'teams': teams,
            'tags': tags,
            'owners': owners
        })
class UserProfile(JeevesModel):
    """Minimal user profile: a username and an e-mail address."""

    # Account name, stored as a plain character field.
    username = CharField(max_length=1024)
    # Contact address; stored as a CharField, so no e-mail validation here.
    email = CharField(max_length=1024)