def __init__(self, *args, **kwargs):
    """Initialise the model, then expose an ``encrypted_<pk>`` class attribute.

    After normal model initialisation, a ``cached_property`` wrapping
    ``_encrypted_pk`` is installed on the class under the name
    ``encrypted_<pk-field-name>`` (e.g. ``encrypted_id``).
    """
    super(EncryptedPKModel, self).__init__(*args, **kwargs)
    # Derive the attribute name from the primary-key field of this model.
    attribute_name = "encrypted_%s" % (self._meta.pk.name,)
    descriptor = cached_property(self.__class__._encrypted_pk)
    setattr(self.__class__, attribute_name, descriptor)
def __new__(mcs, name, bases, attrs):
    """Wrap each attribute listed in ``CACHED_ATTRS`` in a ``cached_property``.

    This must happen here (at class creation) rather than in ``__init__``,
    because ``cached_property`` only works through the descriptor protocol:
    it has to be present in the class namespace for its ``__get__`` to fire.
    """
    for cached_name in mcs.CACHED_ATTRS:
        if cached_name in attrs:
            attrs[cached_name] = cached_property(attrs[cached_name])
    return super(WidgetMeta, mcs).__new__(mcs, name, bases, attrs)
# Fixture class: one cached property created via the decorator, one created by
# wrapping an ordinary method explicitly (with an explicit descriptor name).
class A(object):
    @cached_property
    def value(self):
        """Here is the docstring..."""
        return 1, object()

    def other_value(self):
        return 1

    # Wrapped manually instead of decorated, so ``other_value`` itself stays a
    # plain callable method; ``name`` tells the descriptor its attribute name.
    other = cached_property(other_value, name='other')
def test_cached_property_set_name_not_called(self):
    """A cached_property attached after class creation must raise on access.

    ``__set_name__`` is only invoked for descriptors present during class-body
    execution; assigning the descriptor afterwards leaves it unnamed, so the
    first attribute access raises TypeError.
    """
    descriptor = cached_property(lambda instance: None)

    class Host:
        pass

    Host.cp = descriptor
    expected = 'Cannot use cached_property instance without calling __set_name__() on it.'
    with self.assertRaisesMessage(TypeError, expected):
        Host().cp
class Class:
    """Fixture exposing a mangled cached property and a manually wrapped one."""

    def other_value(self):
        """Here is the docstring..."""
        return 1, object()

    # Wrapping explicitly (rather than decorating) keeps ``other_value``
    # callable as a normal method while also caching it under ``other``.
    other = cached_property(other_value)

    @cached_property
    def __value(self):
        """Here is the docstring..."""
        return 1, object()
class User(AbstractBaseUser, PermissionsMixin, models.Model):
    """Custom user model keyed by email, with a cached human-name helper."""

    # Random small-UUID primary key instead of an auto-increment integer.
    id = SmallUUIDField(default=uuid_default(), primary_key=True, db_index=True, editable=False, verbose_name='ID')
    full_name = models.CharField(max_length=100)
    email = models.EmailField(unique=True, db_index=True)
    # Normally provided by auth.AbstractUser, but we're not using that here.
    date_joined = models.DateTimeField(_('date joined'), default=timezone.now, editable=False)
    is_active = models.BooleanField(
        _('active'), default=True,
        help_text=_('Designates whether this user should be treated as '
                    'active. Unselect this instead of deleting accounts.'))
    is_staff = models.BooleanField(
        _('staff status'), default=False,
        help_text=_(
            'Designates whether the user can log into this admin site.')
    )  # Required for Django Admin, for tenant staff/admin see role

    EMAIL_FIELD = 'email'
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['full_name']

    objects = managers.UserManager()

    class Meta:
        ordering = ('-date_joined', )

    def get_name(self):
        """Return ``full_name`` parsed and capitalised as a HumanName."""
        name = HumanName(self.full_name)
        name.capitalize()
        return name

    def set_name(self, value):
        """Update ``full_name`` and drop the cached parsed name.

        NOTE(review): ``del self.name`` raises AttributeError if the cached
        value was never computed — presumably callers read ``name`` first;
        confirm.
        """
        self.full_name = value
        del self.name

    name = cached_property(get_name)
    # NOTE(review): cached_property is not a property — it has no setter
    # protocol, so this assignment is inert; ``user.name = x`` simply
    # overwrites the cached value without calling set_name(). Confirm intent.
    name.setter = set_name

    def get_short_name(self):
        # First name only, from the parsed HumanName.
        return self.name.first

    def get_full_name(self):
        return self.name
def cached_property_dao(alias=None, dao_class_or_namespace=None):
    """Build a ``cached_property`` class attribute that lazily resolves a DAO.

    Deferring the DAO lookup to the first attribute access avoids
    circular-import problems at class-definition time.

    Usage::

        class XXX:
            token_dao = dao.cached_property_dao("authtoken.token")
    """
    # pylint: disable=unused-argument
    def _resolve(self):
        return get_lazy_dao(alias, dao_class_or_namespace)

    return functional.cached_property(_resolve)
class AuthorizationCreateView(LoginRequiredMixin, RelatedAuthorizationsMixin, CreateView):
    """Create an ``Authorization`` for a codename passed via the query string."""

    model = Authorization
    form_class = AuthorizationForm

    def form_valid(self, form):
        """Save the authorization, then flash a success message."""
        response = super(AuthorizationCreateView, self).form_valid(form)
        messages.success(self.request, _("L'accès a été créé avec succès"))
        return response

    def get_authorization_definition(self):
        """
        Returns a tuple of the form (codename, label) for the considered
        authorization.

        Raises Http404 if the 'codename' query parameter is absent or is not
        one of the related authorization choices.
        """
        authorization_labels_dict = dict(
            self.get_related_authorization_choices())
        # Validate explicitly instead of with `assert`: assertions are
        # stripped under `python -O`, which would turn this clean 404 into a
        # KeyError (HTTP 500) on the return line below.
        codename = self.request.GET.get('codename', None)
        if codename is None or codename not in authorization_labels_dict:
            raise Http404
        return codename, authorization_labels_dict[codename]

    def get_context_data(self, **kwargs):
        """Expose the codename and its label to the template."""
        context = super(AuthorizationCreateView, self).get_context_data(**kwargs)
        context['authorization_codename'], context['authorization_label'] \
            = self.authorization_definition
        return context

    def get_form_kwargs(self):
        """Pass the validated codename and target instance to the form."""
        kwargs = super(AuthorizationCreateView, self).get_form_kwargs()
        authorization_def = self.authorization_definition
        kwargs.update({
            'codename': authorization_def[0],
            'target': self.get_target_instance(),
        })
        return kwargs

    def get_target_instance(self):
        """
        Returns the target instance for which we want to create authorizations.
        """
        raise NotImplementedError

    # Cached so the query-string validation runs at most once per request.
    authorization_definition = cached_property(get_authorization_definition)
class Hospital(AuditControlModelBase):
    """
    Simple helper class to hold information related to the various project
    and non-project locations. Currently referenced from Donor,
    OrganAllocation, RetrievalTeam, Person

    This data is so generic and widely used that there should be no
    restrictions on geography, and everyone should be able to view it.
    """
    name = models.CharField(verbose_name=_("HO01 hospital name"), max_length=100)
    country = models.PositiveSmallIntegerField(verbose_name=_("HO02 country"), choices=COUNTRY_CHOICES)
    is_active = models.BooleanField(
        verbose_name=_("HO03 is active"),
        default=True,
        help_text=
        "Not presently used/implemented. For legacy data when a location closes for use"
    )
    is_project_site = models.BooleanField(
        verbose_name=_("HO04 is project site"),
        default=False)

    objects = LiveManager()

    class Meta:
        ordering = ['country', 'name']
        verbose_name = _('HOm1 hospital')
        verbose_name_plural = _('HOm2 hospitals')

    def _full_description(self):
        # "<name>, <country display label>"
        return '%s, %s' % (self.name, self.get_country_display())
    # Cached per instance; name/country are not expected to change mid-request.
    full_description = cached_property(_full_description, name='full_description')

    def get_absolute_url(self):
        return reverse("locations:detail", kwargs={"pk": self.pk})

    def __str__(self):
        return self.full_description
class Podcast(models.Model):
    """A podcast synced from an external provider, optionally linked to Spotify."""

    name = models.CharField(max_length=70, blank=True)
    slug = models.SlugField(unique=True)
    provider = models.ForeignKey(PodcastProvider, on_delete=models.PROTECT)
    remote_id = models.CharField(max_length=200, verbose_name='Provider ID of the podcast')
    # NOTE(review): blank=True allows '' as well as NULL; spotify_url below
    # only checks ``is not None``, so an empty string would yield a bogus URL.
    spotify_id = models.CharField(max_length=70, blank=True, null=True, verbose_name='Spotify show ID')
    is_public = models.BooleanField(verbose_name='visibility')

    def fetch_details(self) -> PodcastDetails:
        """Fetch details from the provider, injecting our slug and Spotify URL."""
        details = self.provider.fetch_podcast_details(self)
        details.slug = self.slug
        # Only add our Spotify link if the provider didn't already supply one.
        if self.spotify_url is not None and 'spotify' not in details.external_urls:
            details.external_urls['spotify'] = self.spotify_url
        return details
    # One provider round-trip per instance lifetime.
    cached_details = cached_property(fetch_details, name='cached_details')

    @property
    def description(self):
        # Delegates to the (cached) provider details.
        return self.cached_details.description

    @property
    def spotify_url(self):
        # Public show URL derived from the stored Spotify show ID, or None.
        if self.spotify_id is not None:
            return f"https://open.spotify.com/show/{self.spotify_id}"
        else:
            return None

    def __str__(self):
        return self.name
class User(AbstractBaseUser, PermissionsMixin, DirtyFieldsMixin, models.Model):
    """Email-keyed user with SMS alert support and phone verification."""

    LANGUAGES = Choices(*settings.LANGUAGES)

    # Random small-UUID primary key instead of an auto-increment integer.
    id = SmallUUIDField(default=uuid_default(), primary_key=True, db_index=True, editable=False, verbose_name='ID')
    full_name = models.CharField(max_length=100)
    email = models.EmailField(unique=True, db_index=True)
    phone = PhoneNumberField(
        blank=True,
        help_text=
        "Your cell phone number for receiving air quality text alerts.")
    phone_verified = models.BooleanField(default=False)
    language = models.CharField(_('Preferred Language'), max_length=5, choices=LANGUAGES, default=LANGUAGES.en)
    # Normally provided by auth.AbstractUser, but we're not using that here.
    date_joined = models.DateTimeField(_('date joined'), default=timezone.now, editable=False)
    is_active = models.BooleanField(
        _('active'), default=True,
        help_text=_('Designates whether this user should be treated as '
                    'active. Unselect this instead of deleting accounts.'))
    is_staff = models.BooleanField(
        _('staff status'), default=False,
        help_text=_(
            'Designates whether the user can log into this admin site.')
    )  # Required for Django Admin, for tenant staff/admin see role

    EMAIL_FIELD = 'email'
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['full_name']

    objects = managers.UserManager()

    class Meta:
        ordering = ('-date_joined', )

    def get_name(self):
        """Return ``full_name`` parsed and capitalised as a HumanName."""
        name = HumanName(self.full_name)
        name.capitalize()
        return name

    def set_name(self, value):
        """Update ``full_name`` and drop the cached parsed name.

        NOTE(review): ``del self.name`` raises AttributeError if the cached
        value was never computed — confirm callers read ``name`` first.
        """
        self.full_name = value
        del self.name

    name = cached_property(get_name)
    # NOTE(review): cached_property has no setter protocol; this assignment is
    # inert — ``user.name = x`` just overwrites the cache. Confirm intent.
    name.setter = set_name

    def get_short_name(self):
        # First name only, from the parsed HumanName.
        return self.name.first

    def get_full_name(self):
        return self.name

    @property
    def verify_phone_rate_limit_key(self):
        # Cache key used to rate-limit phone verification attempts.
        return f'phone-rate-limit:{self.phone}'

    @property
    def verify_phone_code_key(self):
        # Cache key under which the verification code is stored.
        return f'phone-code:{self.phone}'

    def send_sms(self, message, verify=True):
        """Send an SMS to this user; returns False when not permitted.

        With verify=True (default) the phone must be verified; pass
        verify=False to send regardless (e.g. the verification SMS itself).
        """
        if self.phone and (self.phone_verified or not verify):
            return send_sms_message(self.phone, message)
        return False

    def save(self, *args, **kwargs):
        """Persist the user; a changed phone number resets verification."""
        dirty_fields = self.get_dirty_fields()
        if 'phone' in dirty_fields:
            self.phone_verified = False
        return super().save(*args, **kwargs)
class Patient(AuditControlModelBase):
    """
    Base attributes for a person involved in this case as a donor or recipient.

    Patients are not localised in and of themselves, but Donors and Recipients
    are, thus this class has no geographic permissions set. Similarly, because
    this data is a subset of the Donor and Recipient records it will be
    treated using the permissions given to those objects.
    """
    MALE = 'M'  #: CONSTANT for GENDER_CHOICES
    FEMALE = 'F'  #: CONSTANT for GENDER_CHOICES
    GENDER_CHOICES = (
        (MALE, _('OPc01 Male')),
        (FEMALE, _('OPc02 Female'))
    )  #: Patient gender choices

    CAUCASIAN = 1  #: CONSTANT for ETHNICITY_CHOICES
    BLACK = 2  #: CONSTANT for ETHNICITY_CHOICES
    OTHER_ETHNICITY = 3  #: CONSTANT for ETHNICITY_CHOICES
    ETHNICITY_CHOICES = (
        (CAUCASIAN, _('OPc03 Caucasian')),
        (BLACK, _('OPc04 Black')),
        (OTHER_ETHNICITY, _('OPc05 Other'))
    )  #: Patient ethnicity choices

    BLOOD_O = 1  #: CONSTANT for BLOOD_GROUP_CHOICES
    BLOOD_A = 2  #: CONSTANT for BLOOD_GROUP_CHOICES
    BLOOD_B = 3  #: CONSTANT for BLOOD_GROUP_CHOICES
    BLOOD_AB = 4  #: CONSTANT for BLOOD_GROUP_CHOICES
    BLOOD_UNKNOWN = 5  #: CONSTANT for BLOOD_GROUP_CHOICES
    BLOOD_GROUP_CHOICES = (
        (BLOOD_O, 'O'),
        (BLOOD_A, 'A'),
        (BLOOD_B, 'B'),
        (BLOOD_AB, 'AB'),
        (BLOOD_UNKNOWN, _('OPc06 Unknown'))
    )  #: Patient blood_group choices

    # "ET Donor number/ NHSBT Number",
    number = models.CharField(verbose_name=_('OP01 NHSBT Number'), max_length=20, blank=True)
    date_of_birth = models.DateField(
        verbose_name=_('OP02 date of birth'),
        blank=True, null=True,
        validators=[validate_not_in_future],
        help_text="Date can not be in the future")
    date_of_birth_unknown = models.BooleanField(
        default=False, help_text="Internal unknown flag")
    date_of_death = models.DateField(
        verbose_name=_('OP08 date of death'),
        blank=True, null=True,
        validators=[validate_not_in_future],
        help_text="Date can not be in the future")
    date_of_death_unknown = models.BooleanField(
        default=False, help_text="Internal unknown flag")
    gender = models.CharField(
        verbose_name=_('OP03 gender'),
        choices=GENDER_CHOICES, max_length=1, default=MALE)
    weight = models.DecimalField(
        max_digits=4, decimal_places=1,
        verbose_name=_('OP04 Weight (kg)'),
        validators=[MinValueValidator(20.0), MaxValueValidator(200.0)],
        blank=True, null=True,
        help_text="Answer must be in range 20.0-200.0kg")
    height = models.PositiveSmallIntegerField(
        verbose_name=_('OP05 Height (cm)'),
        validators=[MinValueValidator(100), MaxValueValidator(250)],
        blank=True, null=True,
        help_text="Answer must be in range 100-250cm")
    ethnicity = models.IntegerField(
        verbose_name=_('OP06 ethnicity'),
        choices=ETHNICITY_CHOICES, blank=True, null=True)
    blood_group = models.PositiveSmallIntegerField(
        verbose_name=_('OP07 blood group'),
        choices=BLOOD_GROUP_CHOICES, blank=True, null=True)

    objects = LiveManager()

    class Meta:
        ordering = ['number']
        verbose_name = _('OPm1 trial person')
        verbose_name_plural = _('OPm2 organ people')
        # db_table = 'compare_patient'

    def clean(self):
        """
        Clears date_of_birth if unknown is flagged.
        Clears date_of_death if unknown is flagged.
        Error if date_of_death is in the future (OPv02).
        Error if date_of_death is before date_of_birth (OPv03)
        """
        # Clean the fields that are Unknown
        if self.date_of_birth_unknown:
            self.date_of_birth = None
        if self.date_of_death_unknown:
            self.date_of_death = None

        if self.date_of_death:
            if self.date_of_death > timezone.now().date():
                raise ValidationError(
                    _("OPv02 Creepy prediction! Person's date of death is in the future!"))

        if self.date_of_birth and self.date_of_death:
            if self.date_of_death < self.date_of_birth:
                raise ValidationError(
                    _("OPv03 Time running backwards! Person's date of death is before they were born!"))

    @property
    def bmi_value(self):
        """
        Calculated BMI based on stored height and weight information.

        Uses http://www.nhs.uk/chq/Pages/how-can-i-work-out-my-bmi.aspx?CategoryID=51 for formula

        :return: BMI value (or None when height/weight are missing or invalid)
        :rtype: float
        """
        # FIX: both fields are nullable (blank=True, null=True); comparing
        # None < 1 raises TypeError in Python 3, so guard for None first.
        if self.height is None or self.weight is None:
            return None
        if self.height < 1 or self.weight < 1:
            return None  # _("DOv12 Not Available")
        height_in_m = self.height / 100
        # FIX: DecimalField values come back as Decimal, and Decimal/float is
        # a TypeError — convert to float before dividing by the float height.
        return (float(self.weight) / height_in_m) / height_in_m

    def _age_from_dob(self):
        """
        Determines a person's age from their Date of Birth, compared initially
        against a Date of Death (if it exists), or against the current date if
        not applicable.

        :return: age in years as a whole number, if date of birth is known, otherwise None
        :rtype: int
        """
        the_end = self.date_of_death if self.date_of_death else timezone.now().date()
        if self.date_of_birth:
            return relativedelta(the_end, self.date_of_birth).years
        return None
    age_from_dob = cached_property(_age_from_dob, name='age_from_dob')

    def _is_recipient(self):
        """
        Determine if a recipient record is linked to this person

        :return: True if recipient link exists
        :rtype: bool
        """
        try:
            return self.recipient is not None
        except ObjectDoesNotExist:
            return False
    is_recipient = cached_property(_is_recipient, name='is_recipient')

    def _is_donor(self):
        """
        Determine if a donor record is linked to this person

        :return: True if donor link exists
        :rtype: bool
        """
        try:
            return self.donor is not None
        except ObjectDoesNotExist:
            return False
    is_donor = cached_property(_is_donor, name='is_donor')

    @property
    def is_alive(self):
        """
        If no date of death is known, then presume person is alive.

        :return: True if date_of_death is unknown
        :rtype: bool
        """
        return self.date_of_death is None

    @property
    def trial_id(self):
        """
        Determine if donor or recipient, and then pass back their trial id

        :return:
        """
        if self.is_donor:
            return self.donor.trial_id
        return self.recipient.trial_id

    def __str__(self):
        if settings.DEBUG:
            return '%s : (%s, %s) %s' % (self.id, self.get_gender_display(),
                                         self.age_from_dob, self.number)
        else:
            return '(%s, %s) %s' % (self.get_gender_display(),
                                    self.age_from_dob, self.number)
class Person(AbstractUser):
    """
    Replacement for original StaffPerson class which was linked to User as a
    one-to-one via "profile" attribute. This represents any person that is a
    member of staff (medical, administrative, academic, etc) and therefore not
    a patient (see compare.Patient for that). Not all people with records here
    will be active users of the system

    We'll rely on Django-Reversion to capture changes to records, and thus the
    audit trail.
    """
    from wp4.locations.models import Hospital

    # Constants to help reference specific staff groups (auth_groups, defined by fixtures)
    PERFUSION_TECHNICIAN = 1  #: Constant for Group ID
    TRANSPLANT_COORDINATOR = 2  #: Constant for Group ID
    RESEARCH_NURSE = 3  #: Constant for Group ID
    NATIONAL_COORDINATOR = 4  #: Constant for Group ID
    CENTRAL_COORDINATOR = 5  #: Constant for Group ID
    STATISTICIAN = 6  #: Constant for Group ID
    SYSTEMS_ADMINISTRATOR = 7  #: Constant for Group ID
    LOCAL_INVESTIGATOR = 8  #: Constant for Group ID
    OTHER_PROJECT_MEMBER = 9  #: Constant for Group ID
    BIOBANK_COORDINATOR = 10  #: Constant for Group ID
    CHIEF_INVESTIGATOR = 11  #: Constant for Group ID
    PRINCIPLE_INVESTIGATOR = 12  #: Constant for Group ID
    CENTRAL_INVESTIGATOR = 13  #: Constant for Group ID
    NATIONAL_INVESTIGATOR = 14  #: Constant for Group ID
    THEATRE_CONTACT = 15  #: Constant for Group ID

    _phone_regex = RegexValidator(
        regex=r'^\+?1?\d{9,15}$',
        message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed."
    )
    # Per-instance cache of this person's group ids (filled on first has_group call).
    _my_groups = None

    telephone = models.CharField(
        verbose_name=_("SP01 telephone number"),
        validators=[_phone_regex],
        max_length=15,
        blank=True,
        null=True
    )  #: Contents validated against phone_regex ``r'^\+?1?\d{9,15}$'``
    based_at = models.ForeignKey(
        Hospital,
        on_delete=models.PROTECT,
        verbose_name=_("SP02 location"),
        blank=True,
        null=True,
        help_text="Link to a primary hospital location for the member of staff"
    )

    def has_group(self, group_ids=()):
        """Return True if this person belongs to any of the given group ids.

        :param group_ids: a single group id, or a list/tuple of group ids.
            FIX: the default was a mutable list (``[]``); an immutable empty
            tuple avoids the shared-mutable-default pitfall with identical
            behaviour (the argument is only read, never mutated).
        :rtype: bool
        """
        if self._my_groups is None:
            # Cache the group ids once per instance to avoid repeated queries.
            self._my_groups = [g.id for g in self.groups.all()]
        if type(group_ids) in (list, tuple):
            for group in group_ids:
                if group in self._my_groups:
                    return True
        else:
            if group_ids in self._my_groups:
                return True
        return False

    def _is_administrator(self):
        """
        Checks for membership of an admin group

        :return: True when the person is in any administrator-level group
        :rtype: bool
        """
        administrator_groups = (
            self.NATIONAL_COORDINATOR,
            self.CENTRAL_COORDINATOR,
            self.SYSTEMS_ADMINISTRATOR,
            self.BIOBANK_COORDINATOR,
            self.CHIEF_INVESTIGATOR,
            self.PRINCIPLE_INVESTIGATOR,
            self.NATIONAL_INVESTIGATOR,
            self.CENTRAL_INVESTIGATOR,
            self.STATISTICIAN,
        )
        if self.has_group(administrator_groups):
            return True
        # NOTE(review): CENTRAL_INVESTIGATOR is already in the tuple above, so
        # this branch is unreachable; kept for the Issue #249 paper trail.
        elif self.has_group(self.CENTRAL_INVESTIGATOR) and settings.DEBUG is True:
            # Add a pass through for Ina when on the test server as per Issue #249
            return True
        return False
    is_administrator = cached_property(_is_administrator, name='is_administrator')

    class Meta:
        verbose_name = _('SPm1 person')
        verbose_name_plural = _('SPm2 people')
        ordering = ['id']
        permissions = (
            ("single_person", "Can only view their own person record"),
        )

    def get_absolute_url(self):
        return reverse("wp4:staff:detail", kwargs={"pk": self.pk})

    def __str__(self):
        return self.get_full_name()
class BackingIndex(models.Model):
    """A remote package index (e.g. PyPI) that this mirror syncs packages from."""

    slug = models.SlugField(unique=True)
    url = models.URLField(verbose_name=_('URL'))
    # NOTE(review): nullable — see itersync() below for the None comparison.
    last_update_serial = models.BigIntegerField(null=True, blank=True)
    backend = models.CharField(
        max_length=255,
        choices=INDEX_BACKENDS,
        default=INDEX_BACKENDS.PYPI,
    )

    class Meta:
        verbose_name_plural = _('backing indexes')

    def __str__(self):
        return self.slug

    def get_client(self):
        """Instantiate the configured backend client for this index URL."""
        Client = import_string(self.backend)
        return Client(self.url)
    client = cached_property(get_client)

    def get_package(self, package_name, create=True):
        """Return the Package for ``package_name`` (normalised); optionally create it."""
        normalized_package_name = utils.normalize_package_name(package_name)
        if create:
            package, created = Package.objects.get_or_create(
                index=self,
                slug=normalized_package_name,
                defaults={'name': package_name},
            )
        else:
            package = self.package_set.get(slug=normalized_package_name)
        return package

    def itersync(self):
        """Sync updated packages from the index, yielding the serial after each update.

        NOTE(review): on a fresh index ``last_update_serial`` is NULL, and
        ``serial > None`` raises TypeError on Python 3 — this looks like
        Python 2-era code (six is used throughout); confirm the first-sync path.
        """
        serial = self.last_update_serial
        packages_to_update = self.client.iter_updated_packages(serial)
        for package_name, serial in packages_to_update:
            if package_name:
                if not self.import_package(package_name):
                    # Nothing imported: remove the package
                    slug = utils.normalize_package_name(package_name)
                    Package.objects.filter(index=self, slug=slug).delete()
                    cache.delete(
                        Package.get_cache_version_key(self.slug, slug),
                    )
            if serial > self.last_update_serial:
                self.last_update_serial = serial
                yield self.last_update_serial
        self.save(update_fields=['last_update_serial'])

    def sync(self):
        """Run itersync() to completion, discarding the yielded serials."""
        for i in self.itersync():
            pass

    def import_package(self, package_name):
        """Import all releases for a package; return its pk, or None if nothing imported."""
        # log.info('importing {} from {}'.format(package_name, self.url))
        try:
            versions = self.client.get_package_releases(package_name)
        except client.PackageNotFound:
            log.debug('package {} not found on {}'
                      .format(package_name, self.url))
            return
        if not versions:
            log.debug('no versions found for package {} on {}'
                      .format(package_name, self.url))
            return
        package = self.get_package(package_name)
        release_ids = []
        for version, releases in six.iteritems(versions):
            release_details = package.get_best_release(releases)
            if not release_details:
                continue
            release = package.get_release(version, release_details)
            release_ids.append(release.pk)
        if release_ids:
            # Remove outdated releases
            package.release_set.exclude(pk__in=release_ids).delete()
            # Expire the cache
            package.expire_cache()
        return package.pk if release_ids else None

    def expire_cache(self):
        """Expire the cache entries for every package belonging to this index."""
        for slug in self.package_set.values_list('slug', flat=True):
            Package.expire_package_cache(self.slug, slug)
# NOTE(review): this fragment appears cut from its enclosing class (presumably
# Bug) — the `number` property below has no visible class header.
@property
def number(self):
    # The bug's public number is simply its primary key.
    return self.pk


# Assign getters for the inherited properties.
def get_from_comment(attr):
    """Build a getter returning the first truthy ``attr`` among a bug's comments."""
    fn = attrgetter(attr)

    def inner(self):
        return head(filter(bool, map(fn, self.comments.all())))
    return inner


# Install each inherited property on Bug as a per-instance cached lookup.
for attr in INHERITED_PROPERTIES:
    setattr(Bug, attr, cached_property(get_from_comment(attr)))


# NOTE(review): the decorator implies a __str__ method which is not visible
# here — this class definition appears truncated in this chunk.
@python_2_unicode_compatible
class Comment(models.Model):
    bug = models.ForeignKey('buggy.Bug', verbose_name=_('bug'), related_name='comments')
    user = models.ForeignKey(AUTH_USER_MODEL, verbose_name=_('created by'))
    created_at = models.DateTimeField(_('created at'), auto_now_add=True)

    # Bug properties
    title = models.CharField(_('title'), max_length=255, null=True, blank=True)
    assigned_to = models.ForeignKey(AUTH_USER_MODEL, null=True, blank=True,
                                    verbose_name=_('assigned to'), related_name='+')
from .utils import model_name


class TenantMixin(object):
    """
    View mixin that retrieve the current tenant from the request. This could
    have been set from a middleware base on a domain name for example.
    """
    tenant_attr_name = get_tenant_model().ATTR_NAME

    def get_tenant(self):
        # The tenant is expected to have been attached to the request
        # (e.g. by a middleware) under the configured attribute name.
        return getattr(self.request, self.tenant_attr_name)

# Expose the tenant under its model-configured attribute name as a cached
# property; set via setattr because the name is only known at runtime.
setattr(TenantMixin, get_tenant_model().ATTR_NAME,
        cached_property(lambda self: self.get_tenant()))


class TenantObjectMixin(TenantMixin):
    """
    View mixin that returns the correct queryset for the specified model based
    on the retrieved tenant.
    """
    model = None
    context_object_name = None

    def get_model(self):
        # NOTE(review): this method appears truncated in this chunk — the
        # error is built but the raise/return path is not visible.
        if self.model:
            if not isinstance(self.model, TenantModelBase):
                msg = "%s.model is not an instance of TenantModelBase."
class MediaItem(models.Model):
    """
    An individual media item in the media platform.

    Most fields in this model can store blank values since they are synced
    from external providers who may not have the degree of rigour we want.
    For the same reason, we have default values for most fields.
    """

    @dataclasses.dataclass
    class Source:
        """An encoded media stream for a media item."""

        #: MediaItem for this source (post Python3.7, we'll be able to refer
        #: to MediaItem as the type.)
        item: object

        #: Media type of the stream
        mime_type: str

        #: URL pointing to this source
        url: str

        #: Width of the stream or None if this is an audio stream
        width: typing.Optional[int] = None

        #: Height of the stream or None if this is an audio stream
        height: typing.Optional[int] = None

    VIDEO = 'video'
    AUDIO = 'audio'
    UNKNOWN = 'unknown'
    TYPE_CHOICES = ((VIDEO, 'Video'), (AUDIO, 'Audio'))

    # '' (None) first, then all ISO 639-3 languages sorted by display name.
    LANGUAGE_CHOICES = tuple(itertools.chain(
        [('', 'None')],
        sorted(((language.part3, language.name) for language in languages),
               key=lambda choice: choice[1])))

    #: Object manager. See :py:class:`~.MediaItemManager`. The objects
    #: returned by this manager do not include deleted objects. See
    #: :py:attr:`~.objects_including_deleted`.
    objects = MediaItemManager()

    #: Object manager whose objects include the deleted items. This has been
    #: separated out into a separate manager to avoid inadvertently including
    #: deleted objects in a query
    objects_including_deleted = MediaItemManager(include_deleted=True)

    #: Primary key
    id = models.CharField(max_length=_TOKEN_LENGTH, primary_key=True,
                          default=_make_token, editable=False)

    #: Channel which contains media item - if NULL, then the media item is in
    #: no channel.
    channel = models.ForeignKey('Channel',
                                help_text='Channel containing media item',
                                null=True,
                                on_delete=models.SET_NULL,
                                related_name='items')

    #: Media item title
    title = models.TextField(help_text='Title of media item', blank=True, default='')

    #: Media item description
    description = models.TextField(help_text='Description of media item', blank=True, default='')

    #: Duration
    duration = models.FloatField(editable=False, help_text='Duration of video', default=0.)

    #: Type of media.
    type = models.CharField(
        max_length=10,
        choices=TYPE_CHOICES,
        default=UNKNOWN,
        editable=False,
        help_text='Type of media (video, audio or unknown)')

    #: Publication date
    published_at = models.DateTimeField(
        null=True, blank=True, help_text='Date from which video is visible')

    #: Downloadable flag
    downloadable = models.BooleanField(
        default=False,
        help_text='If this item can be viewed, can it also be downloaded?')

    #: ISO 693-3 language code
    language = models.CharField(
        max_length=3,
        default='',
        blank=True,
        choices=LANGUAGE_CHOICES,
        help_text=
        ('ISO 639-3 three letter language code describing majority language used in this item'
         ))

    #: Video copyright
    copyright = models.TextField(
        blank=True, default='', help_text='Free text describing Copyright holder')

    #: List of tags for video
    tags = pgfields.ArrayField(models.CharField(max_length=256),
                               default=_blank_array,
                               blank=True,
                               help_text='Tags/keywords for item')

    #: Creation time
    created_at = models.DateTimeField(auto_now_add=True)

    #: Last update time
    updated_at = models.DateTimeField(auto_now=True)

    #: Deletion time. If non-NULL, the item has been "deleted" and should not
    #: usually be visible.
    deleted_at = models.DateTimeField(null=True, blank=True)

    def __str__(self):
        return '{} ("{}")'.format(self.id, self.title)

    def get_sources(self, only_if_downloadable=True):
        """
        Retrieve and return a list of :py:class:`~.MediaItem.Source` instances
        representing the raw media sources for this media item.

        By default, the source list will be empty if the media item is not
        marked as downloadable. Set the *only_if_downloadable* parameter to
        ``False`` to skip this check.
        """
        if (not self.downloadable and only_if_downloadable) or not hasattr(self, 'jwp'):
            return []
        return self.jwp.sources

    #: Cached property which calls get_sources() to retrieve sources.
    sources = cached_property(get_sources, name='sources')

    def get_embed_url(self):
        """
        Return a URL suitable for use in an IFrame which will render this
        media. This URL renders the media unconditionally; it does not respect
        any view permissions.
        """
        if not hasattr(self, 'jwp'):
            return None
        return self.jwp.embed_url
class DatabaseCreation(BaseDatabaseCreation):
    """Test-database creation/teardown for MSSQL, driven via the 'master' DB."""

    def _create_master_connection(self):
        """
        Create a transactionless connection to 'master' database.
        """
        settings_dict = self.connection.settings_dict.copy()
        settings_dict['NAME'] = 'master'
        nodb_connection = type(self.connection)(
            settings_dict,
            alias=NO_DB_ALIAS,
            allow_thread_sharing=False)
        return nodb_connection
    # Built once per instance, on first use.
    _nodb_connection = cached_property(_create_master_connection)

    def mark_tests_as_expected_failure(self, failing_tests):
        """
        Flag tests as expectedFailure. This should only run during the
        testsuite.
        """
        django_version = django.VERSION[:2]
        for test_name, versions in six.iteritems(failing_tests):
            if not versions or not isinstance(versions, (list, tuple)):
                # skip None, empty, or invalid
                continue
            if not isinstance(versions[0], (list, tuple)):
                # Ensure list of versions
                versions = [versions]
            # Only patch tests whose listed versions match the running Django.
            if all(map(lambda v: v[:2] != django_version, versions)):
                continue
            try:
                test_case_name, _, method_name = test_name.rpartition('.')
                test_case = import_string(test_case_name)
                method = getattr(test_case, method_name)
                method = expectedFailure(method)
                setattr(test_case, method_name, method)
            except (ImportError, ImproperlyConfigured):
                # Best-effort: silently skip tests that can't be imported.
                pass

    def create_test_db(self, *args, **kwargs):
        """Mark known-failing tests before delegating to the default creation."""
        self.mark_tests_as_expected_failure(
            self.connection.features.failing_tests)
        super(DatabaseCreation, self).create_test_db(*args, **kwargs)

    def _create_test_db(self, verbosity=1, autoclobber=False, keepdb=False):
        """
        Create the test databases using a connection to database 'master'.
        """
        if self._test_database_create(settings):
            try:
                test_database_name = super(DatabaseCreation, self)._create_test_db(
                    verbosity, autoclobber)
            except Exception as e:
                if 'Choose a different database name.' in str(e):
                    # NOTE(review): if super() raised, test_database_name was
                    # never assigned — this print (and the install call below)
                    # would hit a NameError. Confirm and fix upstream.
                    six.print_(
                        'Database "%s" could not be created because it already exists.'
                        % test_database_name)
                else:
                    six.reraise(*sys.exc_info())
            self.install_regex_clr(test_database_name)
            return test_database_name
        if verbosity >= 1:
            six.print_("Skipping Test DB creation")
        return self._get_test_db_name()

    def _destroy_test_db(self, test_database_name, verbosity=1):
        """
        Drop the test databases using a connection to database 'master'.
        """
        if not self._test_database_create(settings):
            if verbosity >= 1:
                six.print_("Skipping Test DB destruction")
            return
        for alias in connections:
            connections[alias].close()
        try:
            with self._nodb_connection.cursor() as cursor:
                qn_db_name = self.connection.ops.quote_name(test_database_name)
                # boot all other connections to the database, leaving only this connection
                cursor.execute(
                    "ALTER DATABASE %s SET SINGLE_USER WITH ROLLBACK IMMEDIATE"
                    % qn_db_name)
                time.sleep(1)
                # database is now clear to drop
                cursor.execute("DROP DATABASE %s" % qn_db_name)
        except Exception:
            # if 'it is currently in use' in str(e):
            #     six.print_('Cannot drop database %s because it is in use' % test_database_name)
            # else:
            six.reraise(*sys.exc_info())

    def _test_database_create(self, settings):
        """
        Check the settings to see if the test database should be created.
        """
        # Per-connection setting wins over the project-wide one.
        if 'TEST_CREATE' in self.connection.settings_dict:
            return self.connection.settings_dict.get('TEST_CREATE', True)
        if hasattr(settings, 'TEST_DATABASE_CREATE'):
            return settings.TEST_DATABASE_CREATE
        else:
            return True

    def enable_clr(self):
        """ Enables clr for server if not already enabled
            This function will not fail if current user doesn't have
            permissions to enable clr, and clr is already enabled
        """
        with self._nodb_connection.cursor() as cursor:
            # check whether clr is enabled
            cursor.execute('''
            SELECT value FROM sys.configurations
            WHERE name = 'clr enabled'
            ''')
            res = cursor.fetchone()
            if not res or not res[0]:
                # if not enabled enable clr
                cursor.execute("sp_configure 'clr enabled', 1")
                cursor.execute("RECONFIGURE")

    def install_regex_clr(self, database_name):
        """Install the regex CLR assembly and REGEXP_LIKE function into the DB."""
        sql = '''
USE {database_name};
-- Drop and recreate the function if it already exists
IF OBJECT_ID('REGEXP_LIKE') IS NOT NULL
    DROP FUNCTION [dbo].[REGEXP_LIKE]
IF EXISTS(select * from sys.assemblies where name like 'regex_clr')
    DROP ASSEMBLY regex_clr
;
CREATE ASSEMBLY regex_clr
FROM 0x{assembly_hex}
WITH PERMISSION_SET = SAFE;
create function [dbo].[REGEXP_LIKE]
(
    @input nvarchar(max),
    @pattern nvarchar(max),
    @caseSensitive int
)
RETURNS INT  AS
EXTERNAL NAME regex_clr.UserDefinedFunctions.REGEXP_LIKE
'''.format(
            database_name=self.connection.ops.quote_name(database_name),
            assembly_hex=self.get_regex_clr_assembly_hex(),
        ).split(';')
        self.enable_clr()
        with self._nodb_connection.cursor() as cursor:
            for s in sql:
                cursor.execute(s)

    def get_regex_clr_assembly_hex(self):
        """Return the bundled regex_clr.dll as a hex string for CREATE ASSEMBLY."""
        import os
        import binascii
        with open(os.path.join(os.path.dirname(__file__), 'regex_clr.dll'), 'rb') as f:
            assembly = binascii.hexlify(f.read()).decode('ascii')
        return assembly
class BaseVolumeDriver(object):
    """
    Abstract base for elFinder volume drivers.

    Subclasses implement the elFinder connector commands (open/mkdir/upload/
    paste/...) against a concrete storage backend. All configuration is read
    from the keyword arguments given at construction time (``self.kwargs``).
    """
    # Encoding used for text content sent to/from the client.
    content_encoding = 'UTF-8'

    def __init__(self, request=None, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        self.request = request

    def get_volume_id(self):
        """ Returns the volume ID for the volume, which is used as a prefix
            for client hashes.
        """
        raise NotImplementedError

    def _get_connector_url(self):
        """:return url of driver connector"""
        view_name = self.kwargs.get('connector_url_view_name', 'elfinder_connector')
        collection_id = self.kwargs.get('collection_id')
        if collection_id:
            url = reverse(view_name, kwargs={'coll_id': collection_id})
        else:
            url = reverse(view_name)
        return url
    # Computed once per driver instance.
    connector_url = cached_property(_get_connector_url)

    @cached_property
    def login_required(self):
        # Truthiness of the configured flag; absent means not required.
        return bool(self.kwargs.get('login_required'))

    @cached_property
    def login_url(self):
        return self.kwargs.get('login_url')

    @cached_property
    def login_test_func(self):
        # Resolve a dotted-path string to a callable; anything else falls
        # back to the default "is authenticated" check.
        # NOTE(review): a callable passed directly in kwargs is discarded by
        # the else branch — confirm whether only strings are supported.
        test_func = self.kwargs.get('login_test_func')
        if isinstance(test_func, string_types):
            test_func = import_string(test_func)
        else:
            test_func = lambda u: u.is_authenticated
        return test_func

    def get_options(self):
        """Volume config defaults"""
        options = {
            'uplMaxSize': '128M',
            'options': {
                'separator': '/',
                'disabled': [],
                'copyOverwrite': 1
            }
        }
        # Caller-supplied options win over the defaults above.
        options.update(self.kwargs.get('js_api_options', {}))
        return options

    def get_index_template(self, template):
        """Template that render the index view."""
        return self.kwargs.get('index_template', template)

    def get_info(self, target):
        """ Returns a dict containing information about the target directory
            or file. This data is used in response to 'open' commands to
            populates the 'cwd' response var.
            :param target: The hash of the directory for which we want info.
            If this is '', return information about the root directory.
            :returns: dict -- A dict describing the directory.
        """
        raise NotImplementedError

    def zip_download(self, targets, dl=False):
        """ Prepare files for download
            :param targets[]: array of hashed paths of the nodes
            :returns: dict -- A dict describing the zip file.
        """
        raise NotImplementedError

    def get_tree(self, target, ancestors=False, siblings=False):
        """ Gets a list of dicts describing children/ancestors/siblings of the
            target.
            :param target: The hash of the directory the tree starts from.
            :param ancestors: Include ancestors of the target.
            :param siblings: Include siblings of the target.
            :param children: Include children of the target.
            :returns: list -- a list of dicts describing directories.
        """
        raise NotImplementedError

    def read_file_view(self, request, hash, **kwargs):
        """ Django view function, used to display files in response to the
            'file' command.
            :param request: The original HTTP request.
            :param hash: The hash of the target file.
            :returns: dict -- a dict describing the new directory.
        """
        raise NotImplementedError

    def get(self, target, conv):
        """ Returns the content as String (As UTF-8)
            :param target: hash of the file
            :param conv: instructions for character encoding conversion of the text file
                1 : auto detect encoding (Return false as content in response data when failed)
                0 : auto detect encoding (Return { "doconv" : "unknown" } as response data when failed)
                Original Character encoding : original character encoding as specified by the user
        """
        raise NotImplementedError

    def search(self, text, target, reqid):
        """ Search for file/directory
            :param query: search string.
            :param hash: The hash of the parent directory.
            :param reqid: request session id.
            :returns: mimes
        """
        raise NotImplementedError

    def mkdir(self, name, parent):
        """ Creates a directory.
            :param name: The name of the new directory.
            :param parent: The hash of the parent directory.
            :returns: dict -- a dict describing the new directory.
        """
        raise NotImplementedError

    def mkfile(self, name, parent):
        """ Creates a file.
            :param name: The name of the new file.
            :param parent: The hash of the parent directory.
            :returns: dict -- a dict describing the new file.
        """
        raise NotImplementedError

    def putfile(self, target, content, **kwargs):
        """ Update the contents of an existing file.
            :param target: The hash of the file being changed.
            :param content: The contents of the file.
            :param kwargs: optional encoding
            :returns: list -- of files that were successfully uploaded.
        """
        raise NotImplementedError

    def rename(self, name, target):
        """ Renames a file or directory.
            :param name: The new name of the file/directory.
            :param target: The hash of the target file/directory.
            :returns: dict -- a dict describing which objects were added and
            removed.
        """
        raise NotImplementedError

    def duplicate(self, targets):
        """Creates a copy of the directory / file.
           Copy name is generated as follows:
           basedir_name_filecopy+serialnumber.extension (if any)
        """
        # NOTE(review): unlike the other commands this stub does not raise
        # NotImplementedError, so a driver that forgets to override it
        # silently returns None — confirm whether that is intentional.

    def list(self, target):
        """ Lists the contents of a directory.
            :param target: The hash of the target directory.
            :returns: list -- a list containing the names of
            files/directories in this directory.
        """
        raise NotImplementedError

    def paste(self, targets, dest, cut, **kwargs):
        """ Moves/copies target files/directories from source to dest.
            If a file with the same name already exists in the dest directory
            it should be overwritten (the client asks the user to confirm
            this before sending the request).
            :param targets: A list of hashes of files/dirs to move/copy.
            :param source: The current parent of the targets.
            :param dest: The new parent of the targets.
            :param cut: Boolean. If true, move the targets. If false, copy
            the targets.
            :returns: dict -- a dict describing which targets were
            moved/copied.
        """
        raise NotImplementedError

    def size(self, targets):
        """ Returns the size of a directory or file.
            size: The total size for all the supplied targets.
            fileCnt: The total counts of the file for all the supplied
            targets. (Optional to API >= 2.1025)
            dirCnt: The total counts of the directory for all the supplied
            targets. (Optional to API >= 2.1025)
            sizes: An object of each target size infomation.
            (Optional to API >= 2.1030)
        """
        raise NotImplementedError

    def remove(self, target):
        """ Deletes the target files/directories.
            The 'rm' command takes a list of targets - this function is
            called for each target, so should only delete one
            file/directory.
            :param targets: A list of hashes of files/dirs to delete.
            :returns: list -- warnings generated when trying to remove a
            file or directory.
        """
        raise NotImplementedError

    def upload(self, files, parent):
        """ Uploads one or more files in to the parent directory.
            :param files: A list of uploaded file objects, as described here:
            https://docs.djangoproject.com/en/dev/topics/http/file-uploads/
            :param parent: The hash of the directory in which to create the
            new files.
            :returns: TODO
        """
        # NOTE(review): no NotImplementedError here either — see duplicate().

    def dim(self, target, **kwargs):
        """ Returns the dimensions of an image/video
            Arguments:
                cmd : dim
                target : hash path of the node
                substitute : pixel that requests substitute image
                (optional) - API >= 2.1030
            Response:
                dim: The dimensions of the media in the format
                {width}x{height} (e.g. "640x480").
                url: The URL of requested substitute image. (optional)
        """
        raise NotImplementedError

    def resize(self, target, **kwargs):
        """ Change the size of an image.
            :param target: The hash of the target file/directory.
            :kwargs: dict --
                cmd : resize
                mode : 'resize' or 'crop' or 'rotate'
                target : hash of the image path
                width : new image width
                height : new image height
                x : x of crop (mode='crop')
                y : y of crop (mode='crop')
                degree : rotate degree (mode='rotate')
                quality
        """
        raise NotImplementedError

    def upload_chunked(self, files, target, cid, chunk, bytes_range):
        """ Chunking arguments:
            chunk : chunk name "filename.[NUMBER]_[TOTAL].part"
            cid : unique id of chunked uploading file
            range : Bytes range of file "Start byte,Chunk length,Total bytes"
        """
        pass

    def upload_chunked_req(self, files, parent, chunk):
        """Chunk merge request (When receive _chunkmerged, _name)"""
        pass

    def abort(self, reqid):
        """Aborts an operation in progress."""
        pass
from .models import TenantModelBase, TenantSpecificModel class TenantMixin(object): """ View mixin that retrieve the current tenant from the request. This could have been set from a middleware base on a domain name for example. """ tenant_attr_name = get_tenant_model().ATTR_NAME def get_tenant(self): return getattr(self.request, self.tenant_attr_name) setattr( TenantMixin, get_tenant_model().ATTR_NAME, cached_property(lambda self: self.get_tenant()) ) class TenantObjectMixin(TenantMixin): """ View mixin that returns the correct queryset for the specified model based on the retrieved tenant. """ model = None context_object_name = None def get_model(self): if self.model: if not isinstance(self.model, TenantModelBase): msg = "%s.model is not an instance of TenantModelBase."
def test_cached_property_name_validation(self): msg = "%s can't be used as the name of a cached_property." with self.assertRaisesMessage(ValueError, msg % "'<lambda>'"): cached_property(lambda x: None) with self.assertRaisesMessage(ValueError, msg % 42): cached_property(str, name=42)
from django.utils.functional import cached_property from cms.toolbar import toolbar from djangocms_versioning.plugin_rendering import VersionRenderer def content_renderer(self): return VersionRenderer(request=self.request) toolbar.CMSToolbar.content_renderer = cached_property( content_renderer) # noqa: E305
def c_whitelister(self): db_wl = CustomDbWhitelister(self.converter_rules) # db_wl.element_rules = CUSTOM_DEFAULT_ELEMENT_RULES.copy() setattr(db_wl, "element_rules", CUSTOM_DEFAULT_ELEMENT_RULES.copy()) return db_wl if wagtail_version == 2: CustomDbWhitelister = type('CustomDbWhitelister', (DbWhitelisterToDecorate, ), {'clean_tag_node': c_clean_tag_node}) globals()['CustomDbWhitelister'] = CustomDbWhitelister CustomEditorHTMLConverter = type( 'CustomEditorHTMLConverter', (EditorHTMLConverterToDecorate, ), {'whitelister': cached_property(c_whitelister)}) globals()['CustomEditorHTMLConverter'] = CustomEditorHTMLConverter class CKEditor(WidgetWithScript, widgets.Textarea): def __init__(self, *args, **kwargs): if wagtail_version == 2: self.features = features.get_default_features() self.converter = CustomEditorHTMLConverter(self.features) # self.converter.converter_rules = DEFAULT_ELEMENT_RULES super().__init__(*args, **kwargs) def get_panel(self): return RichTextFieldPanel
class RetrievalTeam(models.Model):
    """
    Lookup class for the preset Retrieval Team list. Doesn't inherit from
    AuditControlModelBase as this is primarily a preset list of data, with
    helper functions attached.
    """
    from wp4.locations.models import Hospital, UNITED_KINGDOM

    centre_code = models.PositiveSmallIntegerField(
        verbose_name=_("RT01 centre code"),
        validators=[MinValueValidator(10), MaxValueValidator(99)],
        help_text="Value must be in the range 10-99")
    based_at = models.ForeignKey(
        Hospital,
        on_delete=models.PROTECT,
        verbose_name=_("RT02 base hospital"))

    objects = RetrievalTeamModelForUserManager()

    class Meta:
        ordering = ['centre_code']
        verbose_name = _('RTm1 retrieval team')
        verbose_name_plural = _('RTm2 retrieval teams')
        permissions = (
            # ("view_retrievalteam", "Can only view the data"),  # Replaced by Django 2.1 functionality
            ("restrict_to_national", "Can only use data from the same location country"),
            ("restrict_to_local", "Can only use data from a specific location"),
        )

    def country_for_restriction(self):
        """
        Get the country to be used for geographic restriction of this data
        :return: Int: Value from list in Locations.Models. Should be in range [1,4,5]
        """
        return self.based_at.country

    def location_for_restriction(self):
        """
        Get the location to be used for geographic restriction of this data
        :return: Int: Hospital object id
        """
        return self.based_at.id

    def next_sequence_number(self, is_online=True):
        """
        Return the next available sequence number, taking into account that the donor must
        already be linked to a randomisation record, and thus we're able to deduce if this
        is an online or offline case.

        :param is_online: bool. Are we looking for the online or the offline sequence
        :return: Next free number in a linear sequence
        :rtype: int
        """
        list_code = self.get_randomisation_list(is_online)
        donor_set = self.donor_set.filter(randomisation__list_code=list_code)
        try:
            number = donor_set.latest('sequence_number').sequence_number + 1
        except donor_set.model.DoesNotExist:
            # Fix: this previously caught models.Model.DoesNotExist, but the
            # abstract base Model has no DoesNotExist attribute, so an empty
            # queryset raised AttributeError while handling the exception.
            # Catch the concrete donor model's DoesNotExist instead.
            number = 1
        return number

    def get_randomisation_list(self, is_online=True):
        """
        Returns the id of the relevant randomisation list for the location of this team

        :param is_online: True, select from the online lists. False, select from the offline lists
        :return: Number matching one of the LIST_CHOICE constants
        :rtype: int
        """
        if self.based_at.country == RetrievalTeam.UNITED_KINGDOM:
            if is_online:
                return Randomisation.LIVE_UNITED_KINGDOM
            else:
                return Randomisation.PAPER_UNITED_KINGDOM
        else:
            if is_online:
                return Randomisation.LIVE_EUROPE
            else:
                return Randomisation.PAPER_EUROPE

    def _name(self):
        """
        Human readable name for the retrieval team
        :return: (Centre Code) Team Location Description
        :rtype: str
        """
        return '({0:d}) {1}'.format(self.centre_code, self.based_at.full_description)
    name = cached_property(_name, name='name')

    def _based_in_country(self):
        # Display name of the country the base hospital is located in.
        return self.based_at.get_country_display()
    based_in_country = cached_property(_based_in_country, name='based_in_country')

    def __str__(self):
        return self.name
class SparkJob(EMRReleaseModel, CreatedByModel, EditedAtModel, URLActionModel):
    """
    A data model to store details about a scheduled Spark job, to be
    run on AWS EMR.
    """
    # Scheduling intervals, expressed in hours.
    INTERVAL_DAILY = 24
    INTERVAL_WEEKLY = INTERVAL_DAILY * 7
    INTERVAL_MONTHLY = INTERVAL_DAILY * 30
    INTERVAL_CHOICES = [
        (INTERVAL_DAILY, "Daily"),
        (INTERVAL_WEEKLY, "Weekly"),
        (INTERVAL_MONTHLY, "Monthly"),
    ]
    RESULT_PRIVATE = "private"
    RESULT_PUBLIC = "public"
    RESULT_VISIBILITY_CHOICES = [(RESULT_PRIVATE, "Private"), (RESULT_PUBLIC, "Public")]

    identifier = models.CharField(
        max_length=100,
        help_text="Job name, used to uniqely identify individual jobs.",
        unique=True,
        db_index=True,
    )
    description = models.TextField(help_text="Job description.", default="")
    notebook_s3_key = models.CharField(
        max_length=800,
        help_text="S3 key of the notebook after uploading it to the Spark code bucket.",
    )
    result_visibility = models.CharField(  # can currently be "public" or "private"
        max_length=50,
        help_text="Whether notebook results are uploaded to a public or private bucket",
        choices=RESULT_VISIBILITY_CHOICES,
        default=RESULT_PRIVATE,
    )
    size = models.IntegerField(
        help_text="Number of computers to use to run the job.")
    interval_in_hours = models.IntegerField(
        help_text="Interval at which the job should run, in hours.",
        choices=INTERVAL_CHOICES,
        default=INTERVAL_DAILY,
    )
    job_timeout = models.IntegerField(
        help_text="Number of hours before the job times out.")
    start_date = models.DateTimeField(
        help_text="Date/time that the job should start being scheduled to run.")
    end_date = models.DateTimeField(
        blank=True,
        null=True,
        help_text="Date/time that the job should stop being scheduled to run, null if no end date.",
    )
    expired_date = models.DateTimeField(
        blank=True,
        null=True,
        help_text="Date/time that the job was expired.",
        db_index=True,
    )
    is_enabled = models.BooleanField(
        default=True,
        help_text="Whether the job should run or not.")

    objects = SparkJobQuerySet.as_manager()

    class Meta:
        permissions = [("view_sparkjob", "Can view Spark job")]

    __str__ = autostr("{self.identifier}")
    __repr__ = autorepr(["identifier", "size", "is_enabled"])

    # URLActionModel configuration.
    url_prefix = "jobs"
    url_actions = ["delete", "detail", "download", "edit", "run", "zeppelin"]

    def get_absolute_url(self):
        return self.urls.detail

    @property
    def provisioner(self):
        return SparkJobProvisioner()

    # TEMPORARY till we have 1:1 relationship to cluster object
    # and we can then ask for spark_job.cluster.provisioner
    @property
    def cluster_provisioner(self):
        return ClusterProvisioner()

    @property
    def schedule(self):
        from .schedules import SparkJobSchedule
        return SparkJobSchedule(self)

    def has_future_end_date(self, now):
        # no end date means it'll always be due
        if self.end_date is None:
            return True
        return self.end_date >= now

    @property
    def has_never_run(self):
        """
        Whether the job has run before.
        Looks at both the cluster status and our own record when we asked
        it to run.
        """
        return (self.latest_run is None or
                self.latest_run.status == DEFAULT_STATUS or
                self.latest_run.scheduled_at is None)

    @property
    def has_finished(self):
        """Whether the job's cluster is terminated or failed"""
        return self.latest_run and self.latest_run.status in Cluster.FINAL_STATUS_LIST

    @property
    def has_timed_out(self):
        """
        Whether the current job run has been running longer than the
        job's timeout allows.
        """
        if self.has_never_run:
            # Job isn't even running at the moment and never ran before
            return False
        timeout_delta = timedelta(hours=self.job_timeout)
        max_run_time = self.latest_run.scheduled_at + timeout_delta
        timed_out = timezone.now() >= max_run_time
        return not self.is_runnable and timed_out

    @property
    def is_due(self):
        """
        Whether the start date is in the past and the end date is in the
        future.
        """
        now = timezone.now()
        has_past_start_date = self.start_date <= now
        return has_past_start_date and self.has_future_end_date(now)

    @property
    def is_runnable(self):
        """
        Either the job has never run before or was never finished.
        This is checked right before the actual provisioning.
        """
        return self.has_never_run or self.has_finished

    @property
    def should_run(self):
        """Whether the scheduled Spark job should run."""
        return self.is_runnable and self.is_enabled and self.is_due

    @property
    def is_public(self):
        return self.result_visibility == self.RESULT_PUBLIC

    @property
    def is_active(self):
        return self.latest_run and self.latest_run.status in Cluster.ACTIVE_STATUS_LIST

    @property
    def notebook_name(self):
        # Last path component of the S3 key.
        return self.notebook_s3_key.rsplit("/", 1)[-1]

    @cached_property
    def notebook_s3_object(self):
        return self.provisioner.get(self.notebook_s3_key)

    @cached_property
    def results(self):
        return self.provisioner.results(self.identifier, self.is_public)

    def get_latest_run(self):
        # None when the job has no run history yet.
        try:
            return self.runs.latest()
        except SparkJobRun.DoesNotExist:
            return None
    latest_run = cached_property(get_latest_run, name="latest_run")

    def run(self):
        """Actually run the scheduled Spark job."""
        # if the job ran before and is still running, don't start it again
        if not self.is_runnable:
            return
        jobflow_id = self.provisioner.run(
            user_username=self.created_by.username,
            user_email=self.created_by.email,
            identifier=self.identifier,
            emr_release=self.emr_release.version,
            size=self.size,
            notebook_key=self.notebook_s3_key,
            is_public=self.is_public,
            job_timeout=self.job_timeout,
        )
        # Create new job history record.
        run = self.runs.create(
            spark_job=self,
            jobflow_id=jobflow_id,
            scheduled_at=timezone.now(),
            emr_release_version=self.emr_release.version,
            size=self.size,
        )
        # Remove the cached latest run so this object will requery it.
        try:
            delattr(self, "latest_run")
        except AttributeError:  # pragma: no cover
            pass  # It didn't have a `latest_run` and that's ok.
        with transaction.atomic():
            Metric.record("sparkjob-emr-version",
                          data={"version": self.emr_release.version})
            # sync with EMR API
            transaction.on_commit(run.sync)

    def expire(self):
        # TODO disable the job as well once it's easy to re-enable the job
        deleted = self.schedule.delete()
        self.expired_date = timezone.now()
        self.save()
        return deleted

    def terminate(self):
        """Stop the currently running scheduled Spark job."""
        if self.latest_run:
            self.cluster_provisioner.stop(self.latest_run.jobflow_id)

    def first_run(self):
        # Only schedule the very first run; later runs come from the schedule.
        if self.latest_run:
            return None
        from .tasks import run_job
        return run_job.apply_async(
            args=(self.pk, ),
            kwargs={"first_run": True},
            # make sure we run this task only when we expect it
            # may be in the future, may be in the past
            # but definitely at a specific time
            eta=self.start_date,
        )

    def save(self, *args, **kwargs):
        # whether the job is being created for the first time
        first_save = self.pk is None
        # resetting expired_date in case a user resets the end_date
        if self.expired_date and self.end_date and self.end_date > timezone.now():
            self.expired_date = None
        super().save(*args, **kwargs)
        # Remove the cached latest run so this object will requery it.
        try:
            delattr(self, "latest_run")
        except AttributeError:  # pragma: no cover
            pass  # It didn't have a `latest_run` and that's ok.
        # first remove if it exists
        self.schedule.delete()
        # and then add it, but only if the end date is in the future
        if self.has_future_end_date(timezone.now()):
            self.schedule.add()
        if first_save:
            transaction.on_commit(self.first_run)

    def delete(self, *args, **kwargs):
        # make sure to shut down the cluster if it's currently running
        self.terminate()
        # make sure to clean up the job notebook from storage
        self.provisioner.remove(self.notebook_s3_key)
        self.schedule.delete()
        super().delete(*args, **kwargs)
class System(auto_prefetch.Model):
    """An information system: wraps a root Element and aggregates the control
    implementation statements produced and consumed around it."""

    root_element = auto_prefetch.ForeignKey(
        Element,
        related_name="system",
        on_delete=models.CASCADE,
        help_text="The Element that is this System. Element must be type [Application, General Support System]")
    fisma_id = models.CharField(
        max_length=40,
        help_text="The FISMA Id of the system",
        unique=False,
        blank=True,
        null=True)

    # Notes
    # Retrieve system implementation statements
    #   system = System.objects.get(pk=2)
    #   system.root_element.statements_consumed.filter(statement_type="control_implementation")
    #
    # Retrieve system common controls statements
    #   system = System.objects.get(pk=2)
    #   system.root_element.common_controls.all()[0].common_control.legacy_imp_smt
    #   system.root_element.common_controls.all()[0].common_control.body
    #

    def __str__(self):
        return "'System %s id=%d'" % (self.root_element.name, self.id)

    def __repr__(self):
        # For debugging.
        return "'System %s id=%d'" % (self.root_element.name, self.id)

    def assign_owner_permissions(self, user):
        """Best-effort: grant every System permission to `user`.
        Returns True on success, False on any failure."""
        try:
            permissions = get_perms_for_model(System)
            for perm in permissions:
                assign_perm(perm.codename, user, self)
            return True
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; keep the best-effort contract for ordinary
            # errors only.
            return False

    def assign_edit_permissions(self, user):
        """Best-effort: grant view/change/add permissions to `user`.
        Returns True on success, False on any failure."""
        try:
            permissions = ['view_system', 'change_system', 'add_system']
            for perm in permissions:
                assign_perm(perm, user, self)
            return True
        except Exception:
            # Fix: was a bare `except:` — see assign_owner_permissions.
            return False

    @property
    def smts_common_controls_as_dict(self):
        """Group the root element's common controls by OSCAL control id."""
        common_controls = self.root_element.common_controls.all()
        smts_as_dict = {}
        for cc in common_controls:
            if cc.common_control.oscal_ctl_id in smts_as_dict:
                smts_as_dict[cc.common_control.oscal_ctl_id].append(cc)
            else:
                smts_as_dict[cc.common_control.oscal_ctl_id] = [cc]
        return smts_as_dict

    @property
    def smts_control_implementation_as_dict(self):
        """Group control_implementation statements by sid."""
        smts = self.root_element.statements_consumed.filter(
            statement_type="control_implementation").order_by('pid')
        smts_as_dict = {}
        for smt in smts:
            if smt.sid in smts_as_dict:
                smts_as_dict[smt.sid]['control_impl_smts'].append(smt)
            else:
                smts_as_dict[smt.sid] = {
                    "control_impl_smts": [smt],
                    "common_controls": [],
                    "combined_smt": ""
                }
        return smts_as_dict

    @cached_property
    def control_implementation_as_dict(self):
        """Build, per control sid, the implementation statements plus a
        rendered 'combined' HTML statement; also stubs in entries for
        selected controls that have no statements yet."""
        pid_current = None

        # Fetch all selected controls
        elm = self.root_element
        selected_controls = elm.controls.all().values("oscal_ctl_id", "uuid")

        # Get the smts_control_implementations ordered by part, e.g. pid
        smts = elm.statements_consumed.filter(
            statement_type="control_implementation").order_by('pid')
        smts_as_dict = {}

        # Retrieve all of the existing statements
        for smt in smts:
            if smt.sid in smts_as_dict:
                smts_as_dict[smt.sid]['control_impl_smts'].append(smt)
            else:
                try:
                    elementcontrol = self.root_element.controls.get(
                        oscal_ctl_id=smt.sid, oscal_catalog_key=smt.sid_class)
                    smts_as_dict[smt.sid] = {
                        "control_impl_smts": [smt],
                        "common_controls": [],
                        "combined_smt": "",
                        "elementcontrol_uuid": elementcontrol.uuid,
                        "combined_smt_uuid": uuid.uuid4()
                    }
                except ElementControl.DoesNotExist:
                    # Handle case where Element control does not exist
                    elementcontrol = None
                    smts_as_dict[smt.sid] = {
                        "control_impl_smts": [smt],
                        "common_controls": [],
                        "combined_smt": "",
                        "elementcontrol_uuid": None,
                        "combined_smt_uuid": uuid.uuid4()
                    }

            # Build combined statement
            # Define status options
            impl_statuses = ["Not implemented", "Planned", "Partially implemented", "Implemented", "Unknown"]
            status_str = ""
            for status in impl_statuses:
                if (smt.status is not None) and (smt.status.lower() == status.lower()):
                    status_str += f'[x] {status} '
                else:
                    status_str += f'<span style="color: #888;">[ ] {status}</span> '

            # Conditionally add statement part in the beginning of a block of statements related to a part
            if smt.pid != "" and smt.pid != pid_current:
                smts_as_dict[smt.sid]['combined_smt'] += f"{smt.pid}.\n"
                pid_current = smt.pid

            # DEBUG
            # TODO
            # Poor performance, at least in some instances, appears to being caused by
            # `smt.producer_element.name` parameter in the below statement.
            if smt.producer_element:
                smts_as_dict[smt.sid]['combined_smt'] += f"<i>{smt.producer_element.name}</i>\n{status_str}\n\n{smt.body}\n\n"
            # When "smt.producer_element.name" the provided as a fixed string (e.g, "smt.producer_element.name")
            # for testing purposes, the loop runs 3x faster
            # The reference `smt.producer_element.name` appears to be calling the database and creating poor performance
            # even where there are no statements.

        # Deprecated implementation of inherited/common controls
        # Leave commented out until we can fully delete...Greg - 2020-10-12
        # # Add in the common controls
        # for cc in self.root_element.common_controls.all():
        #     if cc.common_control.oscal_ctl_id in smts_as_dict:
        #         smts_as_dict[smt.sid]['common_controls'].append(cc)
        #     else:
        #         smts_as_dict[cc.common_control.oscal_ctl_id] = {"control_impl_smts": [], "common_controls": [cc], "combined_smt": ""}
        #     # Build combined statement
        #     smts_as_dict[cc.common_control.oscal_ctl_id]['combined_smt'] += "{}\n{}\n\n".format(cc.common_control.name, cc.common_control.body)

        # Populate any controls from assigned baseline that do not have statements
        for ec in selected_controls:
            if ec.get('oscal_ctl_id') not in smts_as_dict:
                smts_as_dict[ec.get('oscal_ctl_id')] = {
                    "control_impl_smts": [],
                    "common_controls": [],
                    "combined_smt": "",
                    # Fix: was ec.get('ec.uuid') — 'ec.uuid' is never a key of
                    # the values() dicts (keys are 'oscal_ctl_id' and 'uuid'),
                    # so the uuid was always None.
                    "elementcontrol_uuid": ec.get('uuid'),
                    "combined_smt_uuid": uuid.uuid4()
                }

        # Return the dictionary
        return smts_as_dict

    @cached_property
    def controls_status_count(self):
        """Retrieve counts of control status"""
        status_list = ['Not Implemented', 'Planned', 'Partially Implemented', 'Implemented', 'Unknown']
        status_stats = {}
        # Fetch all selected controls
        elm = self.root_element
        for status in status_list:
            # Get the smts_control_implementations ordered by part, e.g. pid
            status_stats[status] = elm.statements_consumed.filter(
                statement_type="control_implementation", status=status).count()
        # TODO add index on statement status

        # Get overall controls addressed (e.g., covered)
        status_stats['Addressed'] = elm.statements_consumed.filter(
            statement_type="control_implementation").values('sid').count()
        return status_stats

    @cached_property
    def poam_status_count(self):
        """Retrieve counts of poam status"""
        # Temporarily hard code status list
        status_list = ['Open', 'Closed', "In Progress"]
        # TODO
        # Get a unique filter of status list and gather on that...
        status_stats = {}
        # Fetch all selected controls
        elm = self.root_element
        for status in status_list:
            # Get the smts_control_implementations ordered by part, e.g. pid
            status_stats[status] = elm.statements_consumed.filter(
                statement_type="POAM", status__iexact=status).count()
        # TODO add index on statement status
        return status_stats

    # @property (See below for creation of property from method)
    def get_producer_elements(self):
        """Distinct producer elements of all consumed statements, sorted by name."""
        smts = self.root_element.statements_consumed.all()
        components = {smt.producer_element for smt in smts if smt.producer_element}
        return sorted(components, key=lambda component: component.name)

    producer_elements = cached_property(get_producer_elements)
class Recipient(AuditControlModelBase):
    """
    Extension of an Patient record (via OneToOne link for good ORM/DB management) to capture
    the Recipient specific data. Linked also to a single Organ (Kidney), and, for convenience,
    an OrganAllocation (once confirmed)

    Also holds the meta-data specific to the Transplantation Form
    """
    person = models.OneToOneField(Patient, on_delete=models.PROTECT, help_text="Internal link to Patient")
    organ = models.OneToOneField(Organ, on_delete=models.PROTECT, help_text="Internal link to Organ")
    allocation = models.OneToOneField(
        OrganAllocation, on_delete=models.PROTECT, help_text="Internal link to OrganAllocation")

    # Trial signoffs
    signed_consent = models.NullBooleanField(
        verbose_name=_("RE13 informed consent given"), blank=True, default=None)
    single_kidney_transplant = models.NullBooleanField(
        verbose_name=_("RE14 receiving one kidney"), blank=True, default=None)

    # Recipient details (in addition to Patient)
    RENAL_DISEASE_CHOICES = (
        (1, _('REc04 Glomerular diseases')),
        (2, _('REc05 Polycystic kidneys')),
        (3, _('REc06 Uncertain etiology')),
        (4, _('REc07 Tubular and interstitial diseases')),
        (5, _('REc08 Retransplant graft failure')),
        (6, _('REc09 diabetic nephropathyes')),
        (7, _('REc10 hypertensive nephropathyes')),
        (8, _('REc11 congenital rare disorders')),
        (9, _('REc12 renovascular and other diseases')),
        (10, _('REc13 neoplasms')),
        (11, _('REc14 other')))  #: Recipient renal_disease choices
    renal_disease = models.PositiveSmallIntegerField(
        verbose_name=_('RE15 renal disease'),
        choices=RENAL_DISEASE_CHOICES,
        blank=True, null=True)
    renal_disease_other = models.CharField(
        verbose_name=_('RE16 other renal disease'), max_length=250, blank=True)
    pre_transplant_diuresis = models.PositiveSmallIntegerField(
        verbose_name=_('RE17 diuresis (ml/24hr)'), blank=True, null=True)
    # Percentage value, hence the 0-100 range validators.
    panel_reactive_antibodies = models.PositiveSmallIntegerField(
        verbose_name=_('RE99 panel reactive antibodies'),
        blank=True, null=True,
        validators=[MinValueValidator(0), MaxValueValidator(100)],
        default=None)  # Added as per #336
    panel_reactive_antibodies_unknown = models.BooleanField(
        default=False, help_text="Internal unknown flag")

    # Peri-operative data
    INCISION_CHOICES = (
        (1, _('REc15 midline laparotomy')),
        (2, _('REc16 hockey stick')),
        (3, _('REc17 unknown')))  #: Recipient incision choices
    ARTERIAL_PROBLEM_CHOICES = (
        (1, _('REc18 None')),
        (2, _('REc19 ligated polar artery')),
        (3, _('REc20 reconstructed polar artery')),
        (4, _('REc21 repaired intima dissection')),
        (5, _('REc22 other'))
    )  #: Recipient arterial_problems choices
    VENOUS_PROBLEM_CHOICES = (
        (1, _('REc23 none')),
        (2, _('REc24 laceration')),
        (3, _('REc25 elongation plasty')),
        (4, _('REc26 other'))
    )  #: Recipient venous_problems choices
    knife_to_skin = models.DateTimeField(
        verbose_name=_('RE18 knife to skin time'),
        blank=True, null=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )
    perfusate_measure = models.FloatField(
        verbose_name=_('RE19 pO2 perfusate'),
        blank=True, null=True)  # TODO: Check the value range for perfusate_measure
    perfusion_stopped = models.DateTimeField(
        verbose_name=_('RE20 stop machine perfusion'),
        blank=True, null=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )
    organ_cold_stored = models.BooleanField(
        verbose_name=_('RE21 kidney was cold stored?'), default=False)
    tape_broken = models.PositiveSmallIntegerField(
        verbose_name=_('RE22 tape over regulator broken'),
        blank=True, null=True,
        choices=YES_NO_UNKNOWN_CHOICES,
    )  #: Limit choices to YES_NO_UNKNOWN_CHOICES
    removed_from_machine_at = models.DateTimeField(
        verbose_name=_('RE23 kidney removed from machine at'),
        blank=True, null=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )
    oxygen_full_and_open = models.PositiveSmallIntegerField(
        verbose_name=_('RE24 oxygen full and open'),
        choices=YES_NO_UNKNOWN_CHOICES,
        blank=True, null=True)  #: Limit choices to YES_NO_UNKNOWN_CHOICES
    organ_untransplantable = models.NullBooleanField(
        verbose_name=_('RE25 kidney discarded'),
        help_text=_("REh25 Either answer means further questions will open below"),
        blank=True, null=True)
    organ_untransplantable_reason = models.CharField(
        verbose_name=_('RE26 untransplantable because'), max_length=250, blank=True)
    anesthesia_started_at = models.DateTimeField(
        verbose_name=_('RE27 start anesthesia at'),
        blank=True, null=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )
    incision = models.PositiveSmallIntegerField(
        verbose_name=_('RE28 incision'),
        choices=INCISION_CHOICES,
        blank=True, null=True)  #: Limit choices to INCISION_CHOICES
    transplant_side = models.CharField(
        verbose_name=_('RE29 transplant side'),
        max_length=1,
        choices=LOCATION_CHOICES,
        blank=True)  #: Limit choices to LOCATION_CHOICES
    arterial_problems = models.PositiveSmallIntegerField(
        verbose_name=_('RE30 arterial problems'),
        choices=ARTERIAL_PROBLEM_CHOICES,
        blank=True, null=True)  #: Limit choices to ARTERIAL_PROBLEM_CHOICES
    arterial_problems_other = models.CharField(
        verbose_name=_('RE31 arterial problems other'), max_length=250, blank=True)
    venous_problems = models.PositiveSmallIntegerField(
        verbose_name=_('RE32 venous problems'),
        choices=VENOUS_PROBLEM_CHOICES,
        blank=True, null=True)  #: Limit choices to VENOUS_PROBLEM_CHOICES
    venous_problems_other = models.CharField(
        verbose_name=_('RE33 venous problems other'), max_length=250, blank=True)
    anastomosis_started_at = models.DateTimeField(
        verbose_name=_('RE34 start anastomosis at'),
        blank=True, null=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )
    anastomosis_started_at_unknown = models.BooleanField(
        default=False, help_text="Internal unknown flag")
    reperfusion_started_at = models.DateTimeField(
        verbose_name=_('RE35 start reperfusion at'),
        blank=True, null=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )
    reperfusion_started_at_unknown = models.BooleanField(
        default=False, help_text="Internal unknown flag")
    mannitol_used = models.PositiveSmallIntegerField(
        verbose_name=_('RE36 mannitol used'),
        choices=YES_NO_UNKNOWN_CHOICES,
        blank=True, null=True)  #: Limit choices to YES_NO_UNKNOWN_CHOICES
    other_diurectics = models.PositiveSmallIntegerField(
        verbose_name=_('RE37 other diurectics used'),
        choices=YES_NO_UNKNOWN_CHOICES,
        blank=True, null=True)  #: Limit choices to YES_NO_UNKNOWN_CHOICES
    other_diurectics_details = models.CharField(
        verbose_name=_('RE38 other diurectics detail'), max_length=250, blank=True)
    systolic_blood_pressure = models.PositiveSmallIntegerField(
        verbose_name=_('RE39 systolic blood pressure at reperfusion'),
        validators=[MinValueValidator(10), MaxValueValidator(200)],
        blank=True, null=True,
        help_text="Value must be in range 10-200")
    cvp = models.PositiveSmallIntegerField(
        verbose_name=_('RE40 cvp at reperfusion'), blank=True, null=True)
    intra_operative_diuresis = models.PositiveSmallIntegerField(
        verbose_name=_('RE41 intra-operative diuresis'),
        choices=YES_NO_UNKNOWN_CHOICES,
        blank=True, null=True)  #: Limit choices to YES_NO_UNKNOWN_CHOICES
    successful_conclusion = models.BooleanField(
        verbose_name=_("RE42 successful conclusion"), default=False)
    operation_concluded_at = models.DateTimeField(
        verbose_name=_("RE43 operation concluded at"),
        null=True, blank=True,
        validators=[validate_between_1900_2050, validate_not_in_future],
        help_text="Date must be fall within 1900-2050, and not be in the future"
    )

    # Machine cleanup record
    probe_cleaned = models.NullBooleanField(
        verbose_name=_('RE44 temperature and flow probe cleaned'), blank=True, null=True)
    ice_removed = models.NullBooleanField(
        verbose_name=_('RE45 ice and water removed'), blank=True, null=True)
    oxygen_flow_stopped = models.NullBooleanField(
        verbose_name=_('RE46 oxygen flow stopped'), blank=True, null=True)
    oxygen_bottle_removed = models.NullBooleanField(
        verbose_name=_('RE47 oxygen bottle removed'), blank=True, null=True)
    box_cleaned = models.NullBooleanField(
        verbose_name=_('RE48 box kidney assist cleaned'), blank=True, null=True)
    batteries_charged = models.NullBooleanField(
        verbose_name=_('RE49 batteries charged'), blank=True, null=True)
    cleaning_log = models.TextField(verbose_name=_("RE50 cleaning log notes"), blank=True)

    objects = RecipientModelForUserManager()

    class Meta:
        order_with_respect_to = 'organ'
        verbose_name = _('REm1 recipient')
        verbose_name_plural = _('REm2 recipients')
        get_latest_by = 'pk'
        permissions = (
            # ("view_recipient", "Can only view the data"),  # Replaced by Django 2.1 functionality
            ("restrict_to_national", "Can only use data from the same location country"),
            ("restrict_to_local", "Can only use data from a specific location"),
        )

    def country_for_restriction(self):
        """ Get the country to be used for geographic restriction of this data
        :return: Int: Value from list in Locations.Models. Should be in range [1,4,5]
        """
        # Delegates to the confirmed OrganAllocation.
        return self.allocation.country_for_restriction

    def location_for_restriction(self):
        """ Get the location to be used for geographic restriction of this data
        :return: Int: Hospital object id
        """
        return self.allocation.location_for_restriction

    def clean(self):
        """
        Clears the following fields of data if their corresponding unknown flag is set to True

        * anastomosis_started_at
        * reperfusion_started_at

        Error if transplantation_form_completed is True, and:

        * perfusion_stopped is empty (REv01)
        * removed_from_machine_at is empty (REv02)
        * anesthesia_started_at is empty (REv03)
        * anastomosis_started_at is empty (REv04)
        * reperfusion_started_at is empty (REv05)
        """
        # Clean the fields that at Not Known
        if self.anastomosis_started_at_unknown:
            self.anastomosis_started_at = None
        if self.reperfusion_started_at_unknown:
            self.reperfusion_started_at = None

        if self.organ.transplantation_form_completed:
            # Things to check if the form is being marked as complete...
            # if self.perfusion_stopped is None:  # RE20 -- Removed for Issue #104
            #     raise ValidationError(_("REv01 Missing time machine perfusion stopped"))
            if self.anesthesia_started_at is None:  # RE27
                raise ValidationError(
                    _("REv03 Missing Start time of anaesthesia"))
            if self.anastomosis_started_at is None and self.anastomosis_started_at_unknown is False:  # RE34
                raise ValidationError(
                    _("REv04 Missing Anastomosis Start Time"))
            # Modifed v02 and v05 based on Issue #146
            # Machine-specific checks only apply when the organ was machine-perfused.
            if not self.organ.was_cold_stored:
                if self.removed_from_machine_at is None:  # RE23
                    raise ValidationError(
                        _("REv02 Missing time kidney removed from machine"))
                if self.reperfusion_started_at is None and self.reperfusion_started_at_unknown is False:  # RE35
                    raise ValidationError(
                        _("REv05 Missing Reperfusion Start Time"))

    def __str__(self):
        # Format indices are intentionally out of order: {3} (person id) is
        # shown in parentheses before {2} (trial id).
        return "#{0}: {1} ({3}) with trial id {2}".format(
            self.id, self.person.number, self.trial_id, self.person.id)

    def _age_from_dob(self):
        """
        Returns the calculated age of the Recipient

        :return: Recipient's age in years as calculated from their Date of Birth
        :rtype: int
        """
        return self.person.age_from_dob

    # Cached, lazily-evaluated aliases for the private accessors above.
    age_from_dob = cached_property(_age_from_dob, name='age_from_dob')

    def _trial_id(self):
        """
        Returns the Donor Trial ID combined with the Location (L or R) for the Organ

        :return: 'WP4cctnns'
        :rtype: str
        """
        return self.organ.trial_id

    trial_id = cached_property(_trial_id, name='trial_id')

    def _baseline_qol(self):
        """
        Returns the Baseline Quality of Life record for the Recipient (i.e. one not
        linked also to a Follow Up)

        :return: Quality of Life record(s)
        :rtype: QualityOfLife
        """
        from wp4.health_economics.models import QualityOfLife
        return QualityOfLife.objects.filter(recipient=self.id, followup_3m=None, followup_1y=None)

    baseline_qol = cached_property(_baseline_qol, name='baseline_qol')
class Trainee(User):
    """Proxy of ``User`` restricted to trainees, with schedule/attendance helpers."""

    class Meta:
        proxy = True
        ordering = ['firstname', 'lastname']

    objects = TraineeManager()
    inactive = InactiveTraineeManager()

    # for groupslips, create a schedule named 'Group Events' filled with group events (located in static/react/scripts/testdata/groupevents.js)
    @property
    def group_schedule(self):
        return self.schedules.filter(trainee_select='GP').order_by('priority')

    @property
    def active_schedules(self):
        # Non-deleted schedules for the current (or all-season) term, excluding
        # the special group-events schedules.
        return self.schedules.filter(
            Q(is_deleted=False)
            & (Q(season=Term.current_season()) | Q(season='All'))
            & ~Q(trainee_select='GP')).order_by('priority')

    # rolls for current term
    @property
    def current_rolls(self):
        c_term = Term.current_term()
        rolls = self.rolls.filter(date__gte=c_term.start, date__lte=c_term.end)
        return rolls

    def __unicode__(self):
        try:
            return "%s %s" % (self.firstname, self.lastname)
        except AttributeError as e:
            return str(self.id) + ": " + str(e)

    def get_attendance_record(self, period=None):
        """Build the list of non-'present' attendance entries for the current
        term (or, when ``period`` is given, just that period), marking entries
        covered by approved individual or group leave slips as excused ('E').

        :param period: optional period index within the current term.
        :return: list of dicts with keys 'attendance', 'start', 'end', 'event'.
        """
        from leaveslips.models import GroupSlip
        c_term = Term.current_term()
        rolls = self.rolls.exclude(status='P').filter(
            date__gte=c_term.start, date__lte=c_term.end).order_by(
                'event', 'date').distinct('event', 'date').prefetch_related('event')
        # Approved ('A') or pre-approved ('S') individual slips only.
        ind_slips = self.individualslips.filter(status__in=['A', 'S'])
        att_record = []  # list of non 'present' events
        excused_timeframes = []  # list of groupslip time ranges
        excused_rolls = []  # prevents duplicate rolls

        def attendance_record(att, start, end, event):
            # One normalized record in the returned list.
            return {
                'attendance': att,
                'start': start,
                'end': end,
                'event': event,
            }

        def reformat(slip):
            # ISO-ish 'YYYY-MM-DDTHH:MM:SS' strings for the slip's roll.
            s = str(
                datetime.combine(slip['rolls__date'],
                                 slip['rolls__event__start'])).replace(' ', 'T')
            e = str(
                datetime.combine(slip['rolls__date'],
                                 slip['rolls__event__end'])).replace(' ', 'T')
            return (s, e)

        group_slips = GroupSlip.objects.filter(trainees=self, status__in=['A', 'S'])
        rolls = rolls.order_by('event__id', 'date').distinct(
            'event__id', 'date')  # may not need to order
        if period is not None:
            # works without period, but makes calculate_summary really slow
            p = Period(c_term)
            start_date = p.start(period)
            end_date = p.end(period)
            startdt = datetime.combine(start_date, datetime.min.time())
            enddt = datetime.combine(end_date, datetime.max.time())
            rolls = rolls.filter(
                date__gte=start_date, date__lte=end_date)  # rolls for current period
            ind_slips = ind_slips.filter(
                rolls__in=[d['id'] for d in rolls.values('id')])
            group_slips = group_slips.filter(start__lte=enddt, end__gte=startdt)
        rolls = rolls.values('event__id', 'event__start', 'event__end',
                             'event__name', 'status', 'date')
        ind_slips = ind_slips.values('rolls__event__id', 'rolls__event__start',
                                     'rolls__event__end', 'rolls__date',
                                     'rolls__event__name', 'id')
        excused_timeframes = group_slips.values('start', 'end')
        # first, individual slips
        for slip in ind_slips:
            if slip['rolls__event__id'] is None:
                continue
            start, end = reformat(slip)
            att_record.append(
                attendance_record('E', start, end, slip['rolls__event__id']))
            excused_rolls.append(
                (slip['rolls__event__id'], slip['rolls__date']))
        for roll in rolls:
            excused = False
            for excused_roll in excused_rolls:
                if roll['event__id'] == excused_roll[0] and roll[
                        'date'] == excused_roll[
                            1]:  # Check if roll is excused using the roll's event and the roll's date
                    excused = True
                    break
            if excused is False:
                if roll['status'] == 'A':
                    # absent rolls
                    att_record.append(
                        attendance_record(
                            'A',
                            str(roll['date']) + 'T' + str(roll['event__start']),
                            str(roll['date']) + 'T' + str(roll['event__end']),
                            roll['event__id']))
                else:
                    # tardy rolls
                    att_record.append(
                        attendance_record(
                            'T',
                            str(roll['date']) + 'T' + str(roll['event__start']),
                            str(roll['date']) + 'T' + str(roll['event__end']),
                            roll['event__id']))
        # now, group slips: anything overlapping an approved group slip is excused
        for record in att_record:
            if record['event'] is None:
                continue
            if record['attendance'] != 'E':
                start_dt = parser.parse(record['start'])
                end_dt = parser.parse(record['end'])
                for tf in excused_timeframes:
                    if EventUtils.time_overlap(start_dt, end_dt, tf['start'], tf['end']):
                        record['attendance'] = 'E'
        return att_record

    attendance_record = cached_property(get_attendance_record)

    def calculate_summary(self, period=None):
        """this function examines the Schedule belonging to trainee and search
        through all the Events and Rolls. Returns the number of summary a
        trainee needs to be assigned over the given period.

        :param period: period to summarize; ``None`` covers the whole current
            term (also what the ``num_summary`` cached property uses).
        """
        # FIX: period now defaults to None. Previously it was required, which
        # made the `num_summary = cached_property(calculate_summary)` attribute
        # below raise TypeError on access (cached_property calls the function
        # with the instance only).
        num_A = 0
        num_T = 0
        num_summary = 0
        att_rcd = self.get_attendance_record(period=period)
        for event in att_rcd:
            if event['attendance'] == 'A':
                num_A += 1
            elif event['attendance'] == 'T':
                num_T += 1
        # 2+ absences: one summary per absence; 5+ tardies: one per tardy past 3.
        if num_A >= 2:
            num_summary += max(num_A, 0)
        if num_T >= 5:
            num_summary += max(num_T - 3, 0)
        return num_summary

    num_summary = cached_property(calculate_summary)

    # Get events in date range (handles ranges that span multi-weeks)
    # Returns event list sorted in timestamp order
    # If you want to sort by name, use event_list.sort(key=operator.attrgetter('name'))
    def events_in_date_range(self, start, end, listOfSchedules=None):
        """Composite, priority-resolved events between ``start`` and ``end``.

        :param listOfSchedules: optional schedules to use instead of the
            trainee's active schedules. FIX: default was a mutable ``[]``;
            ``None`` is falsy too, so behavior is unchanged for all callers.
        """
        # check for generic group calendar
        if listOfSchedules:
            schedules = listOfSchedules
        else:
            schedules = self.active_schedules
        # figure out which weeks are in the date range.
        c_term = Term.current_term()
        start_week = c_term.term_week_of_date(start)
        end_week = c_term.term_week_of_date(end)
        w_tb = OrderedDict()
        # for every schedule, filter events to get events in the date range.
        for schedule in schedules:
            # create week table for date range that covers more than one week.
            if end_week - start_week > 0:
                # covers first week.
                evs = schedule.events.filter(
                    Q(weekday__gte=start.weekday())).order_by(
                        'weekday', 'start', 'end')
                weeks = [start_week]
                w_tb = EventUtils.compute_prioritized_event_table(
                    w_tb, weeks, evs, schedule.priority)
                # covers weeks between first and last week.
                evs = schedule.events.all().order_by('weekday', 'start', 'end')
                weeks = range(start_week + 1, end_week)
                w_tb = EventUtils.compute_prioritized_event_table(
                    w_tb, weeks, evs, schedule.priority)
                # covers last week.
                evs = schedule.events.filter(
                    Q(weekday__lte=end.weekday())).order_by(
                        'weekday', 'start', 'end')
                weeks = [end_week]
                w_tb = EventUtils.compute_prioritized_event_table(
                    w_tb, weeks, evs, schedule.priority)
            # create week table for date range that covers only one week.
            else:
                evs = schedule.events.filter(
                    weekday__gte=start.weekday(),
                    weekday__lte=end.weekday()).order_by(
                        'weekday', 'start', 'end')
                weeks = range(start_week, end_week + 1)
                w_tb = EventUtils.compute_prioritized_event_table(
                    w_tb, weeks, evs, schedule.priority)
        # create event list.
        return EventUtils.export_event_list_from_table(
            w_tb, start_datetime=start, end_datetime=end)

    # Get the current event trainee (Attendance Monitor) is in or will be in 15 minutes window before after right now!!
    def immediate_upcoming_event(self, time_delta=15, with_seating_chart=False):
        # Code for debugging
        # Turn this boolean to test locally and receive valid event on page load every time
        test_ev_with_chart = False
        if test_ev_with_chart:
            from schedules.models import Event
            ev = Event.objects.filter(chart__isnull=False)[0]
            date = ev.date_for_week(3)  # calc date from w
            ev.start_datetime = datetime.combine(date, ev.start)
            ev.end_datetime = datetime.combine(date, ev.end)
            return [ev, ]
        # Actual code starts below
        schedules = self.active_schedules
        c_time = datetime.now()
        delay = timedelta(minutes=time_delta)
        start_time = c_time + delay
        end_time = c_time - delay
        c_term = Term.current_term()
        weeks = set([int(c_term.term_week_of_date(c_time.date()))])
        w_tb = OrderedDict()
        for schedule in schedules:
            # Events on today's weekday (or dated today) overlapping the window.
            evs = schedule.events.filter(
                Q(weekday=c_time.weekday()) | Q(day=c_time.date())).filter(
                    start__lte=start_time, end__gte=end_time)
            if with_seating_chart:
                evs = evs.filter(chart__isnull=False)
            schedule_weeks = set(map(int, schedule.weeks.split(',')))
            w_tb = EventUtils.compute_prioritized_event_table(
                w_tb, weeks & schedule_weeks, evs, schedule.priority)
        # print w_tb
        return EventUtils.export_event_list_from_table(w_tb)

    @cached_property
    def events(self):
        schedules = self.active_schedules
        w_tb = OrderedDict()
        # create week table
        for schedule in schedules:
            evs = schedule.events.all()
            weeks = [int(x) for x in schedule.weeks.split(',')]
            w_tb = EventUtils.compute_prioritized_event_table(
                w_tb, weeks, evs, schedule.priority)
        # return all the calculated, composite, priority/conflict resolved list of events
        return EventUtils.export_event_list_from_table(w_tb)

    # events in list of weeks
    def events_in_week_list(self, weeks):
        schedules = self.active_schedules
        w_tb = OrderedDict()
        for schedule in schedules:
            evs = schedule.events.all()
            w_tb = EventUtils.compute_prioritized_event_table(
                w_tb, weeks, evs, schedule.priority)
        # return all the calculated, composite, priority/conflict resolved list of events
        return EventUtils.export_event_list_from_table(w_tb)

    @cached_property
    def groupevents(self):
        return self.groupevents_in_week_range()

    def groupevents_in_week_list(self, weeks):
        schedules = self.group_schedule
        w_tb = OrderedDict()
        # create week table
        # NOTE(review): the loop rebinds `weeks` from each schedule, shadowing
        # the parameter — preserved as-is; confirm this is intentional.
        for schedule in schedules:
            evs = schedule.events.all()
            weeks = [int(x) for x in schedule.weeks.split(',')]
            w_tb = EventUtils.compute_prioritized_event_table(
                w_tb, weeks, evs, schedule.priority)
        # return all the calculated, composite, priority/conflict resolved list of events
        return EventUtils.export_event_list_from_table(w_tb)

    def groupevents_in_week_range(self, start_week=0, end_week=19):
        weeks = [int(x) for x in range(start_week, end_week + 1)]
        return self.groupevents_in_week_list(weeks)
class PipelineTrack(models.Model):
    """Audit record of objects created by a named pipeline run, with helpers
    to revert (raw-delete) or temporarily remove and re-insert those objects."""

    pipeline_name = models.TextField(db_index=True, null=True)
    item_content_type = models.ForeignKey(ContentType, related_name='+')
    item_id = models.PositiveIntegerField(null=True, blank=True)
    # List of (content-type key, [ids]) pairs describing what the run created.
    created_info = JSONField(default=dict)
    trigger_from_name = models.TextField(db_index=True, null=True)
    result_content_type = models.ForeignKey(ContentType, null=True, blank=True,
                                            related_name='+')
    result_id = models.PositiveIntegerField(null=True, blank=True)
    created_date = models.DateTimeField(auto_now_add=True)

    def get_item(self, key_attr):
        """Resolve the generic FK named ``key_attr`` ("item" or "result")
        to a model instance, or None if either half of the pair is unset."""
        # FIX: removed stray debug statement `print key_attr`, which wrote to
        # stdout on every cached `item`/`result` access.
        item_id = getattr(self, key_attr + "_id")
        item_ct = getattr(self, key_attr + "_content_type_id")
        if not item_id or not item_ct:
            return None
        return model_ct_map[item_ct].objects.get(id=item_id)

    # Lazily resolved, cached generic relations.
    item = cached_property(curry(get_item, key_attr="item"))
    result = cached_property(curry(get_item, key_attr="result"))

    class Meta(object):
        index_together = [("item_content_type", "item_id")]

    @classmethod
    def by_objects(cls, name, item, objects, result=None, trigger_from_name=None):
        """Build (without saving) a tracker for ``objects`` created by pipeline
        ``name``; returns None when there is nothing to track."""
        if not objects:
            return None
        data = dict(pipeline_name=name,
                    created_info=group_by(
                        objects, lambda obj: ct_model_map[obj.__class__],
                        lambda obj: obj.id).items(),
                    trigger_from_name=trigger_from_name)
        data["item_content_type_id"], data["item_id"] = cls._get_generic_info(
            item)
        data["result_content_type_id"], data[
            "result_id"] = cls._get_generic_info(result)
        return cls(**data)

    @classmethod
    def _get_generic_info(cls, obj):
        """Return (content-type key, id) for an instance, (key, None) for a
        model class, or (None, None) for anything else."""
        if isinstance(obj, models.Model):
            return ct_model_map[obj.__class__], obj.id
        elif isinstance(obj, type) and issubclass(obj, models.Model):
            return ct_model_map[obj], None
        return None, None

    @classmethod
    def _revert(cls, created_info):
        """Raw-delete every tracked object listed in ``created_info``;
        unknown content-type keys are skipped."""
        for ct, ids in created_info:
            try:
                model = model_ct_map[ct]
            except KeyError:
                continue
            objs_queryset = model.objects.filter(id__in=ids)
            # _raw_delete skips signals/cascades for speed.
            objs_queryset._raw_delete(objs_queryset.db)

    def revert(self):
        """Delete everything this run created, then delete the tracker itself."""
        self._revert(self.created_info)
        self.delete()

    @classmethod
    def revert_batch(cls, *trackers):
        """Revert several trackers in one pass, merging their created ids per
        content type before deleting."""
        created_infos = defaultdict(list)
        tracker_ids = [tracker.id for tracker in trackers]
        for tracker in trackers:
            for item_content_type_id, ids in tracker.created_info:
                created_infos[item_content_type_id] += ids
        if created_infos:
            cls._revert(created_infos.items())
        cls.objects.filter(id__in=tracker_ids).delete()

    def keep_and_delete(self):
        """Raw-delete the tracked objects but keep in-memory copies on
        ``self.old_data`` so they can be re-inserted later."""
        items = {}
        for ct, ids in self.created_info:
            model = model_ct_map[ct]
            objs_queryset = model.objects.filter(id__in=ids)
            items[model] = list(objs_queryset)
            objs_queryset._raw_delete(objs_queryset.db)
        self.old_data = items
        # NOTE(review): no `reverted` field is declared on this model here —
        # presumably added elsewhere (migration/abstract base); confirm.
        PipelineTrack.objects.filter(id=self.id).update(reverted=True)
        return self

    def re_insert(self):
        """Bulk re-create the objects previously removed by keep_and_delete()."""
        for model, data in self.old_data.iteritems():
            if not data:
                continue
            model.objects.bulk_create(data)
def __new__(mcls, name, bases, namespace, **kwargs):
    """Create the class, installing a lazily-computed ``__doc__`` when the
    class body did not define one of its own."""
    needs_doc = '__doc__' not in namespace
    if needs_doc:
        # cached_property defers the docstring computation to the first
        # attribute access (it fires on the property's __get__), which is
        # why this cannot live in __init__.
        namespace['__doc__'] = cached_property(mcls.__value_doc__)
    return super().__new__(mcls, name, bases, namespace, **kwargs)
def process_request(self, request):
    """Attach a lazily-evaluated ``cart`` attribute and a ``save_cart``
    helper to the request's class for the duration of the process."""
    request_cls = type(request)
    cart_prop = cached_property(cart)
    # FIX: a cached_property assigned onto a class after the class has been
    # created never has __set_name__ invoked by the type machinery, so on
    # modern Django/functools implementations accessing request.cart raises
    # "Cannot use cached_property instance without calling __set_name__()".
    # Call it explicitly; guard with getattr for legacy implementations
    # that infer the name from the wrapped function instead.
    set_name = getattr(cart_prop, '__set_name__', None)
    if set_name is not None:
        set_name(request_cls, 'cart')
    request_cls.cart = cart_prop
    request_cls.save_cart = save_cart
class Event(CustomEventMixin, TranslatedAutoSlugifyMixin, TranslationHelperMixin,
            TranslatableModel):
    """Translatable CMS event with scheduling, registration and SEO fields."""

    # TranslatedAutoSlugifyMixin options
    slug_source_field_name = 'title'
    slug_default = _('untitled-event')
    # when True, updates the event's search_data field
    # whenever the event is saved or a plugin is saved
    # on the event's content placeholder.
    update_search_on_save = getattr(settings, 'EVENTS_UPDATE_SEARCH_DATA_ON_SAVE',
                                    False)

    translations = TranslatedFields(
        title=models.CharField(_('title'), max_length=234),
        slug=models.SlugField(
            verbose_name=_('slug'),
            max_length=255,
            db_index=True,
            blank=True,
            help_text=_('Used in the URL. If changed, the URL will change. '
                        'Clear it to have it re-created automatically.'),
        ),
        lead_in=HTMLField(
            verbose_name=_('Summary'),
            default='',
            help_text=_(
                'The Summary gives the reader the main idea of the story, this '
                'is useful in overviews, lists or as an introduction to your '
                'event.'),
            blank=True,
        ),
        location=HTMLField(
            verbose_name=_('Location'),
            default='',
            blank=True,
        ),
        display_location=models.CharField(
            _('Display Location'),
            max_length=255,
            null=True,
            blank=True,
        ),
        meta_title=models.CharField(max_length=255,
                                    verbose_name=_('meta title'),
                                    blank=True,
                                    default=''),
        meta_description=models.TextField(verbose_name=_('meta description'),
                                          blank=True,
                                          default=''),
        meta_keywords=models.TextField(verbose_name=_('meta keywords'),
                                       blank=True,
                                       default=''),
        meta={'unique_together': ((
            'language_code',
            'slug',
        ), )},
        search_data=models.TextField(blank=True, editable=False),
        is_published_trans=models.BooleanField(_('is published'),
                                               default=False,
                                               db_index=True),
        is_featured_trans=models.BooleanField(_('is featured'),
                                              default=False,
                                              db_index=True),
    )

    price = models.CharField(max_length=255, verbose_name=_('Event Price'),
                             blank=True, default='')
    cpd_points = models.CharField(max_length=255, verbose_name=_('CPD Points'),
                                  blank=True, default='')
    event_start = models.DateTimeField(_('Event start'), default=now)
    event_end = models.DateTimeField(_('Event end'), null=True, blank=True)
    latitude = models.DecimalField(max_digits=8, decimal_places=5,
                                   verbose_name=_('Event latitude'),
                                   blank=True, null=True)
    longitude = models.DecimalField(max_digits=8, decimal_places=5,
                                    verbose_name=_('Event longitude'),
                                    blank=True, null=True)
    host = models.ForeignKey(Person, on_delete=models.SET_NULL, null=True,
                             blank=True, verbose_name=_('host'))
    host_2 = models.ForeignKey(Person, on_delete=models.SET_NULL,
                               related_name='host_2', null=True, blank=True,
                               verbose_name=_('second host'))
    host_3 = models.ForeignKey(Person, on_delete=models.SET_NULL,
                               related_name='host_3', null=True, blank=True,
                               verbose_name=_('third host'))
    registration_until = models.DateTimeField(_('Allow registration until'),
                                              blank=True, null=True)
    registration_content = PlaceholderField(
        'Hide After Happened',
        related_name='events_event_registration_content')
    sidebar = PlaceholderField('Event Sidebar',
                               related_name='events_event_sidebar')
    registration_link = models.CharField(
        max_length=255,
        verbose_name=_('Registration link'),
        blank=True,
        default='',
        help_text=_('link to an external registration system'),
    )
    external_link = models.CharField(
        max_length=255,
        verbose_name=_('External link'),
        blank=True,
        default='',
        help_text=_('link to an external registration system'),
    )
    link_text = models.CharField(
        max_length=255,
        verbose_name=_('Link Text'),
        blank=True,
        default='',
        help_text=_(
            'Text to appear on either the Registration Link or External Link'),
    )
    redirect_url = models.CharField(
        max_length=255,
        verbose_name=_('Redirect URL'),
        blank=True,
        default='',
        help_text=
        _('when this value is filled in the Event page does not load, it redirects to the entered url'
          ),
    )
    content = PlaceholderField('Event Content',
                               related_name='events_event_content')
    app_config = AppHookConfigField(
        EventsConfig,
        verbose_name=_('Section'),
        help_text='',
    )
    template = models.CharField(
        max_length=255,
        verbose_name=_('Event template'),
        blank=True,
        default='',
    )
    channel = models.ForeignKey(Channel, on_delete=models.SET_NULL, null=True,
                                blank=True, verbose_name=_('channel'))
    categories = CategoryManyToManyField(Category,
                                         verbose_name=_('categories'),
                                         blank=True)
    publishing_date = models.DateTimeField(_('publishing date'), default=now)
    is_published = models.BooleanField(_('is published'), default=False,
                                       db_index=True)
    is_featured = models.BooleanField(_('is featured'), default=False,
                                      db_index=True)
    hero_event = models.BooleanField(_('Hero Event'), default=False,
                                     db_index=True)
    featured_image = FilerImageField(
        verbose_name=_('featured image'),
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
    )
    share_image = FilerImageField(
        verbose_name=_('social share image'),
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        help_text=
        'This image will only be shown on social channels. Minimum size: 1200x630px',
        related_name='+')
    show_on_sitemap = models.BooleanField(_('Show on sitemap'), null=False,
                                          default=True)
    show_on_xml_sitemap = models.BooleanField(_('Show on xml sitemap'),
                                              null=False, default=True)
    noindex = models.BooleanField(_('noindex'), null=False, default=False)
    nofollow = models.BooleanField(_('nofollow'), null=False, default=False)
    canonical_url = models.CharField(blank=True, null=True, max_length=255,
                                     verbose_name=_('Canonical URL'))
    custom_fields = JSONField(blank=True, null=True)

    # Setting "symmetrical" to False since it's a bit unexpected that if you
    # set "B relates to A" you immediately have also "A relates to B". It have
    # to be forced to False because by default it's True if rel.to is "self":
    #
    # https://github.com/django/django/blob/1.8.4/django/db/models/fields/related.py#L2144
    #
    # which in the end causes to add reversed releted-to entry as well:
    #
    # https://github.com/django/django/blob/1.8.4/django/db/models/fields/related.py#L977
    services = SortedManyToManyField('js_services.Service',
                                     verbose_name=_('services'), blank=True)
    locations = SortedManyToManyField('js_locations.location',
                                      verbose_name=_('locations'), blank=True)

    objects = RelatedManager()
    all_objects = AllManager()
    search_objects = SearchManager()

    class Meta:
        ordering = ['-event_start']

    def get_class(self):
        '''Return class name'''
        return self.__class__.__name__

    @property
    def type(self):
        '''Event Type / Section.'''
        return self.app_config

    @property
    def type_slug(self):
        '''Event Type / Section Machine Name'''
        return self.app_config.namespace

    @property
    def published(self):
        """
        Returns True only if the event (is_published == True) AND has a
        published_date that has passed.
        """
        language = get_current_language()
        return self.published_for_language(language)

    def published_for_language(self, language):
        # Per-translation flag when TRANSLATE_IS_PUBLISHED, otherwise the
        # shared is_published flag; either way the publishing date must have passed.
        if TRANSLATE_IS_PUBLISHED:
            return ((self.safe_translation_getter('is_published_trans',
                                                  language_code=language,
                                                  any_language=False) or False)
                    and self.publishing_date <= now())
        return (self.is_published and self.publishing_date <= now())

    @property
    def future(self):
        """
        Returns True if the event is published but is scheduled for a
        future date/time.
        """
        if TRANSLATE_IS_PUBLISHED:
            # FIX: `language` was previously undefined here, so this branch
            # raised NameError; resolve it the same way `published` does.
            language = get_current_language()
            return ((self.safe_translation_getter('is_published_trans',
                                                  language_code=language,
                                                  any_language=False) or False)
                    and self.publishing_date > now())
        return (self.is_published and self.publishing_date > now())

    @property
    def upcoming(self):
        return self.event_start > now()

    @property
    def past(self):
        # FIX: was `self.event_start > now()`, a copy-paste of `upcoming`
        # that made `past` and `upcoming` identical; a past event is one
        # whose start is before now.
        return self.event_start < now()

    @property
    def show_registration_content(self):
        return (self.registration_until or self.event_start) > now()

    @property
    def start_date(self):
        return self.event_start.date()

    @property
    def start_time(self):
        return self.event_start.time()

    @property
    def end_date(self):
        # None when the event has no explicit end.
        if self.event_end:
            return self.event_end.date()

    @property
    def end_time(self):
        if self.event_end:
            return self.event_end.time()

    @property
    def hosts(self):
        """The up-to-three hosts that are themselves published, in order."""
        hosts = []
        if self.host and self.host.published:
            hosts.append(self.host)
        if self.host_2 and self.host_2.published:
            hosts.append(self.host_2)
        if self.host_3 and self.host_3.published:
            hosts.append(self.host_3)
        return hosts

    def get_absolute_url(self, language=None):
        """Returns the url for this Event in the selected permalink format."""
        if not language:
            language = get_current_language()
        kwargs = {}
        permalink_type = self.app_config.permalink_type
        if 'y' in permalink_type:
            kwargs.update(year=self.publishing_date.year)
        if 'm' in permalink_type:
            kwargs.update(month="%02d" % self.publishing_date.month)
        if 'd' in permalink_type:
            kwargs.update(day="%02d" % self.publishing_date.day)
        if 'i' in permalink_type:
            kwargs.update(pk=self.pk)
        if 's' in permalink_type:
            slug, lang = self.known_translation_getter('slug', default=None,
                                                       language_code=language)
            if slug and lang:
                site_id = getattr(settings, 'SITE_ID', None)
                if get_redirect_on_fallback(language, site_id):
                    language = lang
                kwargs.update(slug=slug)
        if self.app_config and self.app_config.namespace:
            namespace = '{0}:'.format(self.app_config.namespace)
        else:
            namespace = EventsConfig.default_namespace
        with override(language):
            return reverse('{0}event-detail'.format(namespace), kwargs=kwargs)

    def get_public_url(self, language=None):
        """Absolute URL when the event is published for ``language``, else ''."""
        if not language:
            language = get_current_language()
        if not TRANSLATE_IS_PUBLISHED and self.published:
            return self.get_absolute_url(language)
        if (TRANSLATE_IS_PUBLISHED and \
                (self.safe_translation_getter('is_published_trans', language_code=language, any_language=False) or False) and \
                self.publishing_date <= now()):
            return self.get_absolute_url(language)
        return ''

    def get_search_data(self, language=None, request=None):
        """
        Provides an index for use with Haystack, or, for populating
        Event.translations.search_data.
        """
        if not self.pk:
            return ''
        if language is None:
            language = get_current_language()
        if request is None:
            request = get_request(language=language)
        title = self.safe_translation_getter('title', '')
        description = self.safe_translation_getter('lead_in', '')
        location = self.safe_translation_getter('location', '')
        text_bits = [title, strip_tags(description), strip_tags(location)]
        for category in self.categories.all():
            text_bits.append(
                force_unicode(category.safe_translation_getter('name')))
        for service in self.services.all():
            text_bits.append(
                force_unicode(service.safe_translation_getter('title')))
        if self.content:
            plugins = self.content.cmsplugin_set.filter(language=language)
            for base_plugin in plugins:
                plugin_text_content = ' '.join(
                    get_plugin_index_data(base_plugin, request))
                text_bits.append(plugin_text_content)
        return ' '.join(text_bits)

    def save(self, *args, **kwargs):
        # Update the search index
        if self.update_search_on_save:
            self.search_data = self.get_search_data()
        # slug would be generated by TranslatedAutoSlugifyMixin
        super(Event, self).save(*args, **kwargs)

    def __str__(self):
        return self.safe_translation_getter('title', any_language=True)

    def get_placeholders(self):
        return [
            self.content,
            self.registration_content,
            self.sidebar,
        ]

    def _get_related_qs(self, queryset):
        """Narrow ``queryset`` to events related to this one: by shared
        services first, then shared categories, then same section."""
        queryset = queryset.exclude(pk=self.pk).order_by('-event_start')
        if self.services.exists():
            return queryset.filter(services__in=self.services.all()).distinct()
        elif self.categories.exists():
            return queryset.filter(
                categories__in=self.categories.all()).distinct()
        else:
            return queryset.filter(app_config=self.app_config)

    def related_events(self):
        return self._get_related_qs(Event.objects.published())

    def related_upcoming_events(self):
        return self._get_related_qs(Event.objects.upcoming())

    def related_past_events(self):
        return self._get_related_qs(Event.objects.past())

    # Cached, lazily-evaluated aliases for the related-event queries above.
    cached_related_events = cached_property(related_events,
                                            name='cached_related_events')
    cached_related_upcoming_events = cached_property(
        related_upcoming_events, name='cached_related_upcoming_events')
    cached_related_past_events = cached_property(
        related_past_events, name='cached_related_past_events')
class ZhixiangTraining(models.Model):
    """Per-user training progress through stages A (survey) to D (exam)."""

    user = models.OneToOneField(settings.AUTH_USER_MODEL, models.CASCADE, related_name='zhixiang_training_data', verbose_name='用户')
    examination = models.ForeignKey('ZhixiangExamination', models.PROTECT, null=True, verbose_name='关联考试')

    # Stage A: course survey participation.
    A = Choices(
        (1, '未参与课程调研'),
        (2, '已参与课程调研'),
    )
    a_status = models.IntegerField(choices=A, verbose_name='课程调研状态', default=1)
    a_start = models.DateTimeField(null=True, verbose_name='课程调研开始时间')
    a_end = models.DateTimeField(null=True, verbose_name='课程调研结束时间')

    # Stage B: course completion. The stored value defaults to "not
    # finished"; whenever code reads a 1 it recomputes the real state from
    # the user's watch progress, and once "finished" is detected it is
    # written back so it never needs recomputing again (see get_b()).
    B = Choices(
        (1, '未学完课程'),
        (2, '已学完课程'),
    )
    b_status = models.IntegerField(choices=B, verbose_name='培训课程状态', default=1)

    # Stage C: qualification review.
    C = Choices(
        (1, '未参与资格认证'),
        (2, '已填写问卷星、等待审核'),
        (3, '审核通过'),
        (4, '审核未通过'),
    )
    c_status = models.IntegerField(choices=C, verbose_name='资格认证状态', default=1)
    c_start = models.DateTimeField(null=True, verbose_name='资格认证开始时间')
    c_end = models.DateTimeField(null=True, verbose_name='资格认证结束时间')

    # Stage D: examination.
    D = Choices(
        (1, '未参与考试'),
        (2, '已参与考试'),
    )
    d_status = models.IntegerField(choices=D, verbose_name='考试评定状态', default=1)
    d_start = models.DateTimeField(null=True, verbose_name='考试评定开始时间')
    d_end = models.DateTimeField(null=True, verbose_name='考试评定结束时间')

    def get_b(self):
        """Compute the effective stage-B status (1 or 2), persisting 2."""
        # Once marked as finished, the state is final.
        if self.b_status == 2:
            return self.b_status
        # (2019.07.03: stage B no longer depends on the qualification
        # review; the old c3-based short-circuit was removed.)
        # Without a linked examination there is no course to finish.
        if not self.examination:
            return 1
        # Recompute from the user's actual watch progress.
        course = self.examination.course
        course.fetch_presentationlesson_details(self.user)
        if not course.all_presentationlessons_watched:
            return 1
        # Finished: persist so future reads skip the recomputation.
        self.b_status = 2
        self.save(update_fields=['b_status'])
        return 2

    a = property(lambda self: self.a_status)
    a1 = property(lambda self: self.a == 1)
    a2 = property(lambda self: self.a == 2)
    b = cached_property(lambda self: self.get_b())
    b1 = property(lambda self: self.b == 1)
    b2 = property(lambda self: self.b == 2)
    c = property(lambda self: self.c_status)
    c1 = property(lambda self: self.c == 1)
    c2 = property(lambda self: self.c == 2)
    c3 = property(lambda self: self.c == 3)
    c4 = property(lambda self: self.c == 4)
    d = property(lambda self: self.d_status)
    d1 = property(lambda self: self.d == 1)
    d2 = property(lambda self: self.d == 2)

    def set_timestamp(self, field_name):
        """Stamp one *_start/*_end field with now() and save it.

        Internal helper: ``field_name`` is not validated. Each timestamp
        is written at most once — an existing value is never overwritten.
        """
        if getattr(self, field_name):
            return
        setattr(self, field_name, timezone.now())
        self.save(update_fields=[field_name])

    def serialize(self, to_dict=True):
        """Serialize to a dict, or to a JSON string when ``to_dict`` is False."""
        payload = {
            'id': self.id,
            'user': self.user.serialize(),
            'a_status': self.a_status,
            'a_start': serialize_datetime(self.a_start),
            'a_end': serialize_datetime(self.a_end),
            # Stage B reports the *effective* status via the cached property.
            'b_status': self.b,
            'c_status': self.c_status,
            'c_start': serialize_datetime(self.c_start),
            'c_end': serialize_datetime(self.c_end),
            'd_status': self.d_status,
            'd_start': serialize_datetime(self.d_start),
            'd_end': serialize_datetime(self.d_end),
            'examination': self.examination.serialize() if self.examination else None,
        }
        if to_dict:
            return payload
        return json.dumps(payload, ensure_ascii=False)
from django.utils.functional import cached_property

from cms.toolbar import toolbar

from djangocms_versioning.plugin_rendering import (
    VersionContentRenderer,
    VersionStructureRenderer,
)


def content_renderer(self):
    """Version-aware replacement for ``CMSToolbar.content_renderer``."""
    return VersionContentRenderer(request=self.request)


def structure_renderer(self):
    """Version-aware replacement for ``CMSToolbar.structure_renderer``."""
    return VersionStructureRenderer(request=self.request)


def _install_cached_property(cls, name, func):
    """Attach ``func`` to ``cls`` as a cached_property called ``name``.

    ``cached_property`` relies on the descriptor ``__set_name__`` hook,
    which Python invokes only during class-body creation. When the
    property is monkey-patched onto an existing class, ``__set_name__``
    must be called manually — otherwise the first attribute access raises
    ``TypeError: Cannot use cached_property instance without calling
    __set_name__() on it.``
    """
    prop = cached_property(func)
    set_name = getattr(prop, "__set_name__", None)
    # Older Django cached_property derives the name from func and has no
    # __set_name__; only call it when it exists.
    if set_name is not None:
        set_name(cls, name)
    setattr(cls, name, prop)


# BUGFIX: previously these were plain ``cls.attr = cached_property(func)``
# assignments, which skip __set_name__ and make the property unusable on
# modern Django (see _install_cached_property docstring).
_install_cached_property(
    toolbar.CMSToolbar, "content_renderer", content_renderer)  # noqa: E305
_install_cached_property(
    toolbar.CMSToolbar, "structure_renderer", structure_renderer)