class BaseProduct(PolymorphicModel):
    """Polymorphic base model for all product types.

    Concrete product kinds subclass this; ``PolymorphicManager`` lets a
    single queryset yield instances of the correct subclass.
    """

    # Short display name / model designation of the product.
    name = models.CharField(verbose_name='Name/model', max_length=30)
    # NOTE(review): FloatField for money is lossy; DecimalField is the usual
    # choice. Changing it would require a schema migration, so only flagged.
    price = models.FloatField(verbose_name='Price')
    # Stock count; validator rejects negative quantities.
    quantity = models.IntegerField(verbose_name='Quantity', validators=[MinValueValidator(0)])
    recommended = models.BooleanField(
        default=False)  # recommended products are shown on the home page
    # Optional free-form specification text.
    additional_spec = models.CharField(
        verbose_name='Additional specifications', max_length=200, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save

    objects = PolymorphicManager()

    class Meta:
        db_table = 'products'
        verbose_name = 'Base Product'
        verbose_name_plural = 'Base Products'

    def __str__(self):
        return self.name
def get_queryset(self):
    """Return the polymorphic queryset in MPTT tree order.

    Re-applies the combined django-polymorphic / django-mptt behaviour:
    as of django-mptt 0.7, ``TreeManager.get_queryset()`` no longer calls
    ``super()``, so the polymorphic queryset (which can filter on proxy
    models) is built explicitly here and then ordered by tree position.
    """
    base_qs = PolymorphicManager.get_queryset(self)
    tree_ordering = (self.tree_id_attr, self.left_attr)
    return base_qs.order_by(*tree_ordering)
class TemplateContent(BaseContentMixin):
    """
    Model for template related content
    BaseContentMixin is Polymorphic, allowing you to get all contents
    within few SQL queries (one per child models)

    Attributes:
        name (CharField): If you need to name your template for UX sugar
        path (CharField): Your template path, as used in "render" or include/extend
    """

    # Template this content belongs to.
    template = models.ForeignKey(
        "stack_it.Template",
        verbose_name=_("Template"),
        related_name="contents",
        on_delete=models.CASCADE,
    )

    objects = PolymorphicManager()

    class Meta:
        verbose_name = _("Template")
        # Fix: the plural label previously read _("Template") (singular),
        # which is what the admin displays for list views.
        verbose_name_plural = _("Templates")
        # A content key may appear at most once per template.
        unique_together = (("template", "key"),)

    def __str__(self):
        return self.key
class Message(PolymorphicModel):
    """Polymorphic message; replies link to their thread root via ``parent``."""

    # Author of the message; protected so deleting a user cannot orphan messages.
    sender = models.ForeignKey(get_user_model(), verbose_name=_('sender'),
                               related_name='sent_messages', on_delete=models.PROTECT,
                               help_text='The user who authored the message')
    # Root message of the thread; null for top-level messages.
    parent = models.ForeignKey(
        'self', verbose_name=_('parent'), related_name='replies',
        on_delete=models.CASCADE, null=True, blank=True,
        help_text='The main message in a message thread')
    content = models.TextField(_('content'))

    # Default fields. Used for record-keeping.
    uuid = models.UUIDField(default=uuid.uuid4, editable=False)
    slug = models.SlugField(_('slug'), max_length=250, unique=True, editable=False, blank=True)
    created_at = models.DateTimeField(_('created at'), auto_now_add=True, editable=False)
    # Fix: verbose name previously read _('uploaded at') — a typo for 'updated at'.
    updated_at = models.DateTimeField(_('updated at'), auto_now=True, editable=False)

    objects = PolymorphicManager()
    custom_objects = MessageManager()

    class Meta:
        db_table = 'vestlus_messages'
        indexes = [models.Index(fields=['created_at', 'sender', 'parent'])]
        ordering = ['-created_at', 'sender']

    @property
    def preview(self):
        """First 20 words of the content, with a trailing ellipsis."""
        return f'{truncatewords(self.content, 20)}...'

    @property
    def avatar(self):
        """Sender's photo URL, or a placeholder when the sender has none."""
        try:
            return self.sender.photo.url
        except AttributeError:
            return 'https://github.com/octocat.png'

    def save(self, *args, **kwargs):
        # Slug derives from the last 12 hex chars of the UUID; the UUID never
        # changes, so recomputing it on every save is stable.
        self.slug = slugify(f'{str(self.uuid)[-12:]}')
        super().save(*args, **kwargs)

    def __str__(self):
        return f'{self.slug}'

    def get_absolute_url(self):
        return reverse('vestlus:message-detail', kwargs={'slug': self.slug})
class Base(AdminLinkMixin, PolymorphicModel):
    """Polymorphic base model identified by a slug."""

    slug = models.SlugField()
    objects = PolymorphicManager()

    # django-polymorphic on Django < 2.0 required opting in to the future
    # manager-inheritance behaviour via this Meta flag.
    if django.VERSION < (2, 0):

        class Meta:
            manager_inheritance_from_future = True

    def __str__(self):
        return "Base %s" % self.slug
class Filter(PolymorphicModel):
    """A named, orderable filter; concrete filter kinds subclass this."""

    name = I18nCharField(max_length=60)
    order = django_models.PositiveIntegerField(default=0)

    objects = PolymorphicManager.from_queryset(FilterQuerySet)()

    class Meta:
        ordering = ['order']

    def __str__(self):
        return '{} filter'.format(self.name)
class BaseTask(PolymorphicModel):
    """
    An external task to be processed by work units.

    Use this as the base class for process-engine specific task definitions.
    """

    topic_name = models.CharField(
        _("topic name"),
        max_length=255,
        help_text=_(
            "Topics determine which functions need to run for a task."),
    )
    # Raw input variables supplied for the task.
    variables = JSONField(default=dict)
    status = models.CharField(
        _("status"),
        max_length=50,
        choices=Statuses.choices,
        default=Statuses.initial,
        help_text=_("The current status of task processing"),
    )
    # Variables produced by executing the task.
    result_variables = JSONField(default=dict)
    execution_error = models.TextField(
        _("execution error"),
        blank=True,
        help_text=_("The error that occurred during execution."),
    )
    # Timeline audit-log entries attached generically to this task.
    logs = GenericRelation(TimelineLog, related_query_name="task")

    objects = PolymorphicManager.from_queryset(BaseTaskQuerySet)()

    def get_variables(self) -> dict:
        """
        return input variables formatted for work_unit
        """
        # Subclasses may override to reshape the variables for their engine.
        return self.variables

    def request_logs(self) -> models.QuerySet:
        """Log entries that recorded a request, newest first."""
        return self.logs.filter(
            extra_data__has_key="request").order_by("-timestamp")

    def status_logs(self) -> models.QuerySet:
        """Log entries that recorded a status change, newest first."""
        return self.logs.filter(
            extra_data__has_key="status").order_by("-timestamp")

    def __str__(self):
        return f"{self.polymorphic_ctype}: {self.topic_name} / {self.id}"
class BaseLesson(PolymorphicModel):
    """Polymorphic lesson belonging to a course section."""

    name = CharField(max_length=64)
    course_section = ForeignKey(CourseSection, on_delete=CASCADE, related_name="lessons")
    description = TextField(blank=True)

    objects = PolymorphicManager.from_queryset(BaseLessonQuerySet)()

    class Meta:
        order_with_respect_to = "course_section"

    def is_completed_by(self, user: User) -> bool:
        """Whether *user* has completed this lesson.

        Prefers the ``is_completed`` value annotated by the
        ``with_completed_annotations`` queryset method.  When the queryset
        was not annotated, falls back to a per-lesson existence query,
        which is far less efficient.
        """
        annotated = getattr(self, "is_completed", None)
        if annotated is not None:
            return annotated
        return CompletedLesson.objects.filter(lesson=self, user=user).exists()
class PageContent(BaseContentMixin):
    """
    Model for page related content
    BaseContentMixin is Polymorphic, allowing you to get all contents
    within few SQL queries (one per child models)

    Attributes:
        page (ForeignKey): Page instance where content belongs
    """

    # Owning page; contents are deleted along with it.
    page = models.ForeignKey(
        "stack_it.Page",
        verbose_name=_("Page"),
        related_name="contents",
        on_delete=models.CASCADE,
    )

    objects = PolymorphicManager()

    class Meta:
        verbose_name = _("Page Content")
        verbose_name_plural = _("Page Contents")
        # A content key may appear at most once per page.
        unique_together = (("page", "key"),)
class GoodsNomenclatureDescription(DescriptionMixin, TrackedModel):
    """Tracked description of a goods nomenclature (TARIC record 400, subrecord 15)."""

    record_code = "400"
    subrecord_code = "15"
    # Record/subrecord codes used for the description period records.
    period_record_code = "400"
    period_subrecord_code = "10"

    identifying_fields = ("sid", )

    objects = PolymorphicManager.from_queryset(DescriptionQueryset)()

    sid = NumericSID()
    # Nomenclature this description belongs to; protected against deletion.
    described_goods_nomenclature = models.ForeignKey(
        GoodsNomenclature,
        on_delete=models.PROTECT,
        related_name="descriptions",
    )
    description = LongDescription()

    indirect_business_rules = (business_rules.NIG12, )
    business_rules = (UniqueIdentifyingFields, UpdateValidity)

    class Meta:
        ordering = ("validity_start", )
class Measure(TrackedModel, ValidityMixin):
    """
    Defines the validity period in which a particular measure type is
    applicable to particular nomenclature for a particular geographical area.

    Measures in the TARIC database are stored against the nomenclature code
    which is at the highest level appropriate in the hierarchy. Thus, measures
    which apply to all the declarable codes in a complete chapter are stored
    against the nomenclature code for the chapter (i.e. at the 2-digit level
    only); those which apply to all sub-divisions of an HS code are stored
    against that HS code (i.e. at the 6-digit level only). The advantage of
    this system is that it reduces the number of measures stored in the
    database; the data capture workload (thus diminishing the possibility of
    introducing errors) and the transmission volumes.
    """

    record_code = "430"
    subrecord_code = "00"

    sid = SignedIntSID(db_index=True)

    measure_type = models.ForeignKey(MeasureType, on_delete=models.PROTECT)
    geographical_area = models.ForeignKey(
        "geo_areas.GeographicalArea",
        on_delete=models.PROTECT,
        related_name="measures",
    )
    # Nullable: not every measure targets a nomenclature code.
    goods_nomenclature = models.ForeignKey(
        "commodities.GoodsNomenclature",
        on_delete=models.PROTECT,
        related_name="measures",
        null=True,
        blank=True,
    )
    additional_code = models.ForeignKey(
        "additional_codes.AdditionalCode",
        on_delete=models.PROTECT,
        null=True,
        blank=True,
    )
    # Code kept as plain text when no AdditionalCode row exists for it.
    dead_additional_code = models.CharField(
        max_length=16,
        null=True,
        blank=True,
        db_index=True,
    )
    order_number = models.ForeignKey(
        "quotas.QuotaOrderNumber",
        on_delete=models.PROTECT,
        null=True,
        blank=True,
    )
    # Order number kept as plain text when no QuotaOrderNumber row exists.
    dead_order_number = models.CharField(
        max_length=6,
        validators=[quota_order_number_validator],
        null=True,
        blank=True,
        db_index=True,
    )
    reduction = models.PositiveSmallIntegerField(
        validators=[validators.validate_reduction_indicator],
        null=True,
        blank=True,
        db_index=True,
    )
    # Regulation that creates the measure; its effective end date may
    # override the measure's own end date (see effective_end_date).
    generating_regulation = models.ForeignKey(
        "regulations.Regulation",
        on_delete=models.PROTECT,
    )
    # Regulation that explicitly ends the measure, if any.
    terminating_regulation = models.ForeignKey(
        "regulations.Regulation",
        on_delete=models.PROTECT,
        related_name="terminated_measures",
        null=True,
        blank=True,
    )
    stopped = models.BooleanField(default=False)
    export_refund_nomenclature_sid = SignedIntSID(null=True, blank=True, default=None)
    footnotes = models.ManyToManyField(
        "footnotes.Footnote",
        through="FootnoteAssociationMeasure",
    )

    identifying_fields = ("sid",)

    indirect_business_rules = (
        business_rules.MA4,
        business_rules.MC3,
        business_rules.ME42,
        business_rules.ME49,
        business_rules.ME61,
        business_rules.ME65,
        business_rules.ME66,
        business_rules.ME67,
        business_rules.ME71,
        business_rules.ME73,
    )
    business_rules = (
        business_rules.ME1,
        business_rules.ME2,
        business_rules.ME3,
        business_rules.ME4,
        business_rules.ME5,
        business_rules.ME6,
        business_rules.ME7,
        business_rules.ME8,
        business_rules.ME88,
        business_rules.ME16,
        business_rules.ME115,
        business_rules.ME25,
        business_rules.ME32,
        business_rules.ME10,
        business_rules.ME116,
        business_rules.ME119,
        business_rules.ME9,
        business_rules.ME12,
        business_rules.ME17,
        business_rules.ME24,
        business_rules.ME87,
        business_rules.ME33,
        business_rules.ME34,
        business_rules.ME40,
        business_rules.ME45,
        business_rules.ME46,
        business_rules.ME47,
        business_rules.ME109,
        business_rules.ME110,
        business_rules.ME111,
        business_rules.ME104,
    )

    objects = PolymorphicManager.from_queryset(MeasuresQuerySet)()

    validity_field_name = "db_effective_valid_between"

    @property
    def effective_end_date(self):
        """Measure end dates may be overridden by regulations."""

        # UK measures will have explicit end dates only
        # if self.national:
        #     return self.valid_between.upper

        reg = self.generating_regulation
        # Regulation end date truncated to a plain date, or None.
        effective_end_date = (
            date(
                reg.effective_end_date.year,
                reg.effective_end_date.month,
                reg.effective_end_date.day,
            )
            if reg.effective_end_date
            else None
        )

        # Both the measure and its generating regulation have end dates:
        # the earlier of the two wins.
        if self.valid_between.upper and reg and effective_end_date:
            if self.valid_between.upper > effective_end_date:
                return effective_end_date
            return self.valid_between.upper

        # Explicitly terminated: the measure's own end date stands.
        if self.valid_between.upper and self.terminating_regulation:
            return self.valid_between.upper

        # Otherwise fall back to the regulation's end date (may be None).
        if reg:
            return effective_end_date

        return self.valid_between.upper

    @property
    def effective_valid_between(self):
        # Validity range with the regulation-adjusted end date applied.
        return TaricDateRange(self.valid_between.lower, self.effective_end_date)

    @classmethod
    def objects_with_validity_field(cls):
        return super().objects_with_validity_field().with_effective_valid_between()

    def has_components(self):
        """Whether any MeasureComponent (approved up to this transaction) targets this measure."""
        return (
            MeasureComponent.objects.approved_up_to_transaction(
                transaction=self.transaction,
            )
            .filter(component_measure__sid=self.sid)
            .exists()
        )

    def has_condition_components(self):
        """Whether any condition component targets a condition of this measure."""
        return (
            MeasureConditionComponent.objects.approved_up_to_transaction(
                transaction=self.transaction,
            )
            .filter(condition__dependent_measure__sid=self.sid)
            .exists()
        )

    def get_conditions(self):
        # Latest approved conditions depending on this measure.
        return MeasureCondition.objects.filter(
            dependent_measure__sid=self.sid,
        ).latest_approved()

    def terminate(self, workbasket, when: date):
        """
        Returns a new version of the measure updated to end on the specified
        date.

        If the measure would not have started on that date, the measure is
        deleted instead. If the measure will already have ended by this date,
        then does nothing.
        """
        starts_after_date = self.valid_between.lower >= when
        ends_before_date = (
            not self.valid_between.upper_inf and self.valid_between.upper < when
        )

        # Already ended before `when`: nothing to do.
        if ends_before_date:
            return self

        update_params = {}
        if starts_after_date:
            # Would never have been in force: delete rather than truncate.
            update_params["update_type"] = UpdateType.DELETE
        else:
            update_params["update_type"] = UpdateType.UPDATE
            update_params["valid_between"] = TaricDateRange(
                lower=self.valid_between.lower,
                upper=when,
            )
            # NOTE(review): reconstructed nesting — assumed the terminating
            # regulation default applies only on UPDATE; confirm against VCS.
            if not self.terminating_regulation:
                update_params["terminating_regulation"] = self.generating_regulation

        return self.new_draft(workbasket, **update_params)
class ModelWithMyManagerDefault(ShowFieldTypeAndContent, Model2A):
    """Model whose first (default) manager is a custom one, before the polymorphic manager."""

    # Declared first, so this is the model's default manager.
    my_objects = MyManager()
    objects = PolymorphicManager()
    field4 = models.CharField(max_length=10)
class MROBase3(models.Model):
    """Plain (non-polymorphic) model carrying a PolymorphicManager, for MRO testing."""

    objects = PolymorphicManager()
class ProductCard(PolymorphicModel, OfferPage, Image, Weighable, Dimensional):
    """Polymorphic product card combining offer, image, weight and dimension mixins.

    Subclasses are expected to supply the ``*_class`` hooks (relation /
    attribute-value / model / additional-image classes) and implement
    ``get_public_id()``.
    """

    class Meta:
        # abstract = True
        pass

    # Custom managers
    objects = PolymorphicManager()
    public = PublicProductCardManager()

    # KEY-CLASSES
    attributevalues_relation_class = None
    attributevalue_class = None
    product_model_class = None

    # KEYS AND M2M
    attribute_values = None
    product_model = None

    # Image
    upload_image_to = None
    image_key_attribute = 'get_image_name'

    # Search
    update_search = False

    vendor_code = models.CharField(verbose_name="Артикул", max_length=1024,
                                   db_index=True, unique=True,
                                   validators=[
                                       RegexValidator(
                                           regex=r'^[-_a-z\d]+$',
                                           message='vendor_code valid error',
                                       )
                                   ])

    product_type = models.CharField(
        verbose_name="Тип продукта",
        max_length=128,
        choices=(("", "Не задано"), ("BACKPACK", "BACKPACK"), ("BAG", "BAG"),
                 ("SUITCASE", "SUITCASE"), ("PURSE", "PURSE")),
        default="",
        blank=True,
    )

    @property
    def data_for_delivery(self):
        """Payload describing this product for the delivery service."""
        result = {
            "product_type": self.product_type,
            "price": self.price,
            "purchase_price": self.purchase_price,
            "vendor": self.vendor
        }
        if self.weigh:
            # /1000 with ceil — presumably grams to whole kilograms; confirm.
            result['weigh'] = math.ceil(self.weigh / 1000)
        if self.dimensions:
            result['dimensions'] = self.dimensions
        return result

    product_aditional_image_class = None

    @property
    def get_image_name(self):
        # Slashes are not allowed in image names.
        return self.model.replace('/', '-')

    model = models.CharField(verbose_name="Модель", max_length=1024, db_index=True)

    def get_url(self):
        return self.slug

    @disallowed_before_creation
    def add_attribute_value(self, attribute_value_instance):
        """Link an already-saved attribute value to this product (ignores duplicates)."""
        if attribute_value_instance.pk:
            try:
                self.attributevalues_relation_class.objects.create(
                    product=self, attribute_value=attribute_value_instance)
            except IntegrityError:
                # Relation already exists — nothing to do.
                pass
        else:
            # Fix: error message previously misspelled "creatied".
            raise DisallowedBeforeCreationException(
                'attributes_value_instance must be created')

    @disallowed_before_creation
    def del_attribute_value(self, attribute_value_instance):
        """Unlink an attribute value from this product (missing links are ignored)."""
        if attribute_value_instance.pk:
            try:
                self.attributevalues_relation_class.objects.get(
                    product=self,
                    attribute_value=attribute_value_instance).delete()
            except ObjectDoesNotExist:
                pass
        else:
            # Fix: error message previously misspelled "creatied".
            raise DisallowedBeforeCreationException(
                'attribute_value_instance must be created')

    @disallowed_before_creation
    def clear_attribute_value(self):
        """Remove all attribute-value links for this product."""
        self.attributevalues_relation_class.objects.filter(
            product=self).delete()

    @property
    @disallowed_before_creation
    def attributes(self):
        """Mapping of attribute -> list of its values, in display order."""
        result = OrderedDict()
        for av in self.attribute_values.select_related().order_by(
                '-attribute__order', '-order'):
            result[av.attribute] = result.get(av.attribute, []) + [av]
        return result

    @property
    @disallowed_before_creation
    def additional_images(self):
        return self.product_aditional_image_class.objects.filter(
            product=self).order_by('order', 'id')

    @property
    def public_id(self):
        return self.get_public_id()

    @property
    def has_photo(self):
        # A product "has a photo" when the image differs from the field default.
        return self.image != self.image.field.default

    @property
    @disallowed_before_creation
    def photo_checksums(self):
        """MD5 checksums of the main image (if set) and all additional images."""
        checksums = set()
        images = self.product_aditional_image_class.objects.filter(
            product=self,
        )
        if self.has_photo:
            checksums.add(md5_file_checksum(self.image.path))
        for image in images:
            checksums.add(md5_file_checksum(image.image.path))
        return checksums

    def generate_image_from_main(self):
        """Copy the main image into the additional-image list, after the last one."""
        max_order = self.product_aditional_image_class.objects.filter(
            product=self,
        ).aggregate(Max('order'))['order__max']
        if max_order is None:
            max_order = 0
        else:
            max_order += 1
        instance = self.product_aditional_image_class(
            product=self,
            image=self.image,
            order=max_order,
        )
        instance.save()

    def replace_main_image_by_file(self, image_file):
        """Demote the current main image to an additional image and set a new one."""
        self.generate_image_from_main()
        self.image = image_file
        # Search reindexing is skipped in DEBUG.
        update_search = not settings.DEBUG
        self.save(update_search=update_search)

    def replace_main_image_by_additional(self, image_id):
        """Swap the main image with the additional image `image_id` (no-op if missing)."""
        try:
            instance = self.product_aditional_image_class.objects.get(
                id=image_id)
            self.generate_image_from_main()
            self.image = instance.image
            update_search = not settings.DEBUG
            self.save(update_search=update_search)
            instance.delete()
        except self.product_aditional_image_class.DoesNotExist:
            pass

    def add_photo(self, filename):
        """Attach the image file at `filename` — as the main image when none
        is set, otherwise as an additional image.

        Fix: use a context manager so the file handle is closed even when
        ``save()`` raises (previously a bare open/close pair leaked on error).
        """
        with open(filename, 'rb') as fp:
            img_file = File(fp)
            if self.has_photo:
                instance = self.product_aditional_image_class(
                    product=self,
                    image=img_file,
                )
                instance.save()
            else:
                self.image = img_file
                self.save()

    def get_public_id(self):
        msg = "Method get_public_id() must be implemented by subclass: `{}`"
        raise NotImplementedError(msg.format(self.__class__.__name__))

    def get_scoring(self):
        """Search scoring: 0 without a photo, else the stored score or a random 100-150."""
        scoring = 0
        if self.has_photo:
            if self.scoring != 0:
                scoring = self.scoring
            else:
                scoring = random.randint(100, 150)
        return scoring

    def get_image_url(self):
        return self.image.url

    def get_modifications(self):
        # Sibling products sharing the same product model, excluding self.
        if self.product_model is not None:
            return self.product_model.products.exclude(id=self.id)
        return []

    @property
    def modifications(self):
        return self.get_modifications()

    def __str__(self):
        return 'Product card: {0}'.format(self.slug)

    def save(self, update_search=False, *args, **kwargs):
        self.update_search = update_search
        self.scoring = self.get_scoring()
        super(ProductCard, self).save(*args, **kwargs)
class Hook(PolymorphicModel, BaseConcurrentModel):
    """Polymorphic hook identified by its (primary-key) name."""

    # The name doubles as the primary key, so hook names are globally unique.
    name = models.CharField(primary_key=True, max_length=255)
    objects = PolymorphicManager()
class MROBase3(models.Model): base_3_id = models.AutoField( primary_key=True ) # make sure 'id' field doesn't clash, detected by Django 1.11 objects = PolymorphicManager()
class BaseResource(PolymorphicModel, BaseConcurrentModel):
    """Polymorphic base resource, shareable across departments."""

    # Commented-out legacy field (u'资源名' = "resource name"):
    # name = models.CharField(u'资源名', max_length=64, blank=False, null=False)
    departments = models.ManyToManyField(Department, blank=True)
    objects = PolymorphicManager()
class BaseAccount(ShowFieldTypeAndContent, PolymorphicModel):
    """Polymorphic financial account; broker-specific accounts subclass this."""

    user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True,
                             on_delete=models.CASCADE, related_name='accounts_for_user')
    type = models.CharField(max_length=100)
    account_id = models.CharField(max_length=100)
    taxable = models.BooleanField(default=True)
    display_name = models.CharField(max_length=100, default='')
    # Fraction of the account attributed to this user (e.g. joint accounts).
    joint_share = models.DecimalField(default=1, decimal_places=3, max_digits=6)
    creation_date = models.DateField(default='2009-01-01')

    objects = PolymorphicManager.from_queryset(BaseAccountQuerySet)()

    # Days per chunk when syncing activities (see SyncAndRegenerate).
    activitySyncDateRange = 30

    class Meta:
        ordering = ['account_id']

    def __repr__(self):
        return "BaseAccount({},{},{})".format(self.user, self.account_id, self.type)

    def __str__(self):
        return self.display_name

    @cached_property
    def cur_cash_balance(self):
        # Today's total cash value, or 0 when there are no holding details.
        query = self.holdingdetail_set.cash().today().total_values()
        if query:
            return query.first()[1]
        return 0

    @cached_property
    def cur_balance(self):
        return self.GetValueToday()

    @cached_property
    def yesterday_balance(self):
        return self.GetValueAtDate(datetime.date.today() - datetime.timedelta(days=1))

    @cached_property
    def today_balance_change(self):
        return self.cur_balance - self.yesterday_balance

    @cached_property
    def sync_from_date(self):
        # Resume the day after the newest stored activity, or from the
        # account creation date when there are no activities yet.
        last_activity = self.activities.newest_date()
        if last_activity:
            return last_activity + datetime.timedelta(days=1)
        return self.creation_date

    def import_activities(self, csv_file):
        """Import activities from a CSV and rebuild holdings if anything was added."""
        activity_count = self.activities.all().count()
        self.import_from_csv(csv_file)
        # Only regenerate when the import actually added activities.
        # NOTE(review): nesting of the Sync call is reconstructed — confirm
        # whether Security sync should also run when nothing was imported.
        if self.activities.all().count() > activity_count:
            self.RegenerateHoldings()
            Security.objects.Sync(False)

    def import_from_csv(self, csv_file):
        """
        Override this to enable transaction uploading from csv.
        Subclasses are expected to parse the csv and create the necessary
        BaseRawActivity subclasses.
        """
        pass

    def SyncAndRegenerate(self):
        """Fetch new activities in day-interval chunks and rebuild holdings if needed."""
        activity_count = self.activities.all().count()
        if self.activitySyncDateRange:
            date_range = utils.dates.day_intervals(self.activitySyncDateRange, self.sync_from_date)
            print('Syncing all activities for {} in {} chunks.'.format(
                self, len(date_range)))
            for period in date_range:
                self.CreateActivities(period.start, period.end)
        if self.activities.all().count() > activity_count:
            self.RegenerateHoldings()

    def RegenerateActivities(self):
        """Rebuild all activities from their raw records, then rebuild holdings."""
        self.activities.all().delete()
        with transaction.atomic():
            for raw in self.rawactivities.all():
                raw.CreateActivity()
        self.RegenerateHoldings()

    def RegenerateHoldings(self):
        """Rebuild the holdings table by replaying every activity's effects."""
        self.holding_set.all().delete()
        for activity in self.activities.all():
            for security, qty_delta in activity.GetHoldingEffects().items():
                self.holding_set.add_effect(self, security, qty_delta, activity.trade_date)
        # Drop positions that netted out to zero.
        self.holding_set.filter(qty=0).delete()

    def CreateActivities(self, start, end):
        """
        Retrieve raw activity data for the specified account and start/end
        period. Store it in the DB as a subclass of BaseRawActivity.
        Return the newly created raw instances.
        """
        return []

    def GetValueAtDate(self, date):
        # Total account value at `date`, or 0 when no data exists.
        result = self.holdingdetail_set.at_date(date).total_values().first()
        if result:
            return result[1]
        return 0

    def GetValueToday(self):
        return self.GetValueAtDate(datetime.date.today())
class Source(PolymorphicModel):
    """
    A project source.
    """

    project = models.ForeignKey(
        Project,
        null=True,
        blank=True,
        on_delete=NON_POLYMORPHIC_CASCADE,
        related_name="sources",
    )

    address = models.TextField(
        null=False,
        blank=False,
        help_text="The address of the source. e.g. github://org/repo/subpath",
    )

    path = models.TextField(
        null=True,
        blank=True,
        help_text=
        "The file or folder name, or path, that the source is mapped to.",
    )

    order = models.PositiveIntegerField(
        null=True,
        blank=True,
        help_text="The order that the source should be pulled into the project "
        "(allows explicit overwriting of one source by another).",
    )

    creator = models.ForeignKey(
        User,
        null=True,  # Should only be null if the creator is deleted
        blank=True,
        on_delete=models.SET_NULL,
        related_name="sources_created",
        help_text="The user who created the source.",
    )

    created = models.DateTimeField(
        auto_now_add=True, help_text="The time the source was created.")

    updated = models.DateTimeField(
        auto_now=True, help_text="The time the source was last changed.")

    subscription = models.JSONField(
        null=True,
        blank=True,
        help_text=
        "Information from the source provider (e.g. Github, Google) when a watch subscription was created.",
    )

    jobs = models.ManyToManyField(
        Job,
        help_text=
        "Jobs associated with this source. e.g. pull, push or convert jobs.",
    )

    # The default object manager which will fetch data from multiple
    # tables (one query for each type of source)
    objects = PolymorphicManager()

    # An additional manager which will only fetch data from the `Source`
    # table.
    objects_base = models.Manager()

    class Meta:
        constraints = [
            # Constraint to ensure that sources are not duplicated within a
            # project. Note that sources can share the same `path` e.g.
            # two sources both targetting a `data` folder.
            models.UniqueConstraint(fields=["project", "address"],
                                    name="%(class)s_unique_project_address")
        ]

    def __str__(self) -> str:
        return self.address

    @property
    def type_class(self) -> str:
        """
        Get the name the class of a source instance.

        If this is an instance of a derived class then returns that class
        name. Otherwise, fetches the name of the model class based on the
        `polymorphic_ctype_id` (with caching) and then cases it properly.
        """
        if self.__class__.__name__ != "Source":
            return self.__class__.__name__
        all_lower = ContentType.objects.get_for_id(
            self.polymorphic_ctype_id).model
        for source_type in SourceTypes:
            if source_type.name.lower() == all_lower:
                return source_type.name
        return ""

    @property
    def type_name(self) -> str:
        """
        Get the name of the type of a source instance.

        The `type_name` is intended for use in user interfaces. This base
        implementation simply drops `Source` from the end of the name.
        Can be overridden in derived classes if necessary.
        """
        return self.type_class[:-6]

    @staticmethod
    def class_from_type_name(type_name: str) -> Type["Source"]:
        """Find the class matching the type name."""
        for source_type in SourceTypes:
            if source_type.name.lower().startswith(type_name.lower()):
                return source_type.value
        raise ValueError(
            'Unable to find class matching "{}"'.format(type_name))

    def make_address(self) -> str:
        """
        Create a human readable string representation of the source instance.

        These are intended to be URL-like human-readable and -writable
        shorthand representations of sources (e.g. `github://org/repo/sub/path`;
        `gdoc://378yfh2yg362...`). They are used in admin lists and in API
        endpoints to allowing quick specification of a source (e.g. for
        filtering). Should be parsable by the `parse_address` method of each
        subclass.
        """
        return "{}://{}".format(self.type_name.lower(), self.id)

    @staticmethod
    def coerce_address(address: Union[SourceAddress, str]) -> SourceAddress:
        """
        Coerce a string into a `SourceAddress`.

        If the `address` is already an instance of `SourceAddress` it will be
        returned unchanged. Otherwise, the `parse_address` method of each
        source type (ie. subclass of `Source`) will be called. The first
        class that returns a `SourceAddress` wins.
        """
        if isinstance(address, SourceAddress):
            return address
        for source_type in SourceTypes:
            result = source_type.value.parse_address(address)
            if result is not None:
                return result
        raise ValueError('Unable to parse source address "{}"'.format(address))

    @classmethod
    def parse_address(cls,
                      address: str,
                      naked: bool = False,
                      strict: bool = False) -> Optional[SourceAddress]:
        """
        Parse a string into a `SourceAddress`.

        This default implementation just matches the default string
        representation of a source, as generated by `Source.__str__`.
        Derived classes should override this method to match their own
        `__str__` method.
        """
        type_name = cls.__name__[:-6]
        if address.startswith(type_name.lower() + "://"):
            return SourceAddress(type_name)
        if strict:
            raise ValidationError(
                "Invalid source identifier: {}".format(address))
        return None

    @staticmethod
    def query_from_address(
        address_or_string: Union[SourceAddress, str],
        prefix: Optional[str] = None,
    ) -> models.Q:
        """
        Create a query object for a source address.

        Given a source address, constructs a Django `Q` object which can be
        used in ORM queries. Use the `prefix` argument when you want to use
        this function for a related field. For example,

            Source.query_from_address({
                "type": "Github",
                "repo": "org/repo",
                "subpath": "folder"
            }, prefix="sources")

        is equivalent to :

            Q(
                sources__githubsource__repo="org/repo",
                sources__githubsource__subpath="folder"
            )
        """
        address = Source.coerce_address(address_or_string)
        front = ("{}__".format(prefix)
                 if prefix else "") + address.type.__name__.lower()
        kwargs = dict([("{}__{}".format(front, key), value)
                       for key, value in address.items()])
        return models.Q(**kwargs)

    @staticmethod
    def from_address(address_or_string: Union[SourceAddress, str],
                     **other) -> "Source":
        """
        Create a source instance from a source address.

        Given a source address, creates a new source instance of the
        specified `type` with fields set from the address. The `other`
        parameter can be used to set additional fields on the created source.
        """
        address = Source.coerce_address(address_or_string)
        return address.type.objects.create(**address, **other)

    def to_address(self) -> SourceAddress:
        """
        Create a source address from a source instance.

        The inverse of `Source.from_address`. Used primarily to create a
        pull job for a source.
        """
        all_fields = [field.name for field in self._meta.fields]
        class_fields = [
            name for name in self.__class__.__dict__.keys()
            if name in all_fields
        ]
        return SourceAddress(
            self.type_name,
            **dict([(name, value) for name, value in self.__dict__.items()
                    if name in class_fields]),
        )

    def get_secrets(self, user: User) -> Dict:
        """
        Get any secrets required to pull or push the source.

        This method should be overridden by derived classes to return a
        secrets (e.g. OAuth tokens, API keys) specific to the source (if
        necessary).
        """
        return {}

    def get_url(self, path: Optional[str] = None):
        """
        Create a URL for users to visit the source on the external site.

        path: An optional path to a file within the source (for multi-file
              sources such as GitHub repos).
        """
        raise NotImplementedError

    def get_event_url(self):
        """
        Create a URL for source events to be sent to.

        This is the Webhook URL used by Github, Google and other source
        providers to send event data to when a source has been `watch()`ed.
        """
        return ("https://" + settings.PRIMARY_DOMAIN + reverse(
            "api-projects-sources-event",
            kwargs=dict(project=self.project.id, source=self.id),
        ))

    def pull(self, user: Optional[User] = None) -> Job:
        """
        Pull the source to the filesystem.

        Creates a job, and adds it to the source's `jobs` list.
        """
        source = self.to_address()
        source["type"] = source.type_name

        description = "Pull {0}"
        if self.type_class == "UploadSource":
            description = "Collect {0}"
        description = description.format(self.address)

        job = Job.objects.create(
            project=self.project,
            creator=user or self.creator,
            method=JobMethod.pull.value,
            params=dict(source=source, path=self.path),
            description=description,
            secrets=self.get_secrets(user),
            **Job.create_callback(self, "pull_callback"),
        )
        self.jobs.add(job)
        return job

    @transaction.atomic
    def pull_callback(self, job: Job):
        """
        Update the files associated with this source.
        """
        result = job.result
        if not result:
            return

        from projects.models.files import File, get_modified

        # All existing files for the source are made "non-current" (i.e. will not
        # be displayed in project working directory but are retained for history)
        File.objects.filter(project=self.project, source=self).update(
            current=False, updated=timezone.now())

        # Do a batch insert of files. This is much faster when there are a lot of file
        # than inserting each file individually.
        File.objects.bulk_create([
            File(
                project=self.project,
                path=path,
                job=job,
                source=self,
                updated=timezone.now(),
                modified=get_modified(info),
                size=info.get("size"),
                mimetype=info.get("mimetype"),
                encoding=info.get("encoding"),
                fingerprint=info.get("fingerprint"),
            ) for path, info in result.items()
        ])

        # Asynchronously check whether the project's image needs to be updated given
        # that there are updated files.
        update_image_for_project.delay(project_id=self.project.id)

    def extract(self,
                review,
                user: Optional[User] = None,
                filters: Optional[Dict] = None) -> Job:
        """
        Extract a review from a project source.

        Creates a job, and adds it to the source's `jobs` list.
        Note: the jobs callback is `Review.extract_callback`.
        """
        source = self.to_address()
        source["type"] = source.type_name

        description = "Extract review from {0}"
        description = description.format(self.address)

        job = Job.objects.create(
            project=self.project,
            creator=user or self.creator,
            method=JobMethod.extract.value,
            params=dict(source=source, filters=filters),
            description=description,
            secrets=self.get_secrets(user),
            **Job.create_callback(review, "extract_callback"),
        )
        self.jobs.add(job)
        return job

    def push(self) -> Job:
        """
        Push from the filesystem to the source.

        Creates a `Job` having the `push` method and a dictionary of `source`
        attributes sufficient to push it. This may include authentication
        tokens.
        """
        raise NotImplementedError(
            "Push is not implemented for class {}".format(
                self.__class__.__name__))

    def watch(self, user: User):
        """
        Create a subscription to listen to events for the source.
        """
        raise NotImplementedError(
            "Watch is not implemented for class {}".format(
                self.__class__.__name__))

    def unwatch(self, user: User):
        """
        Remove a subscription to listen to events for the source.
        """
        raise NotImplementedError(
            "Unwatch is not implemented for class {}".format(
                self.__class__.__name__))

    def event(self, data: dict, headers: Optional[dict] = None):
        """
        Handle an event notification.

        Passes on the event to the project's event handler with this source
        added to the context.

        Fix: `headers` previously used a mutable default argument (`{}`);
        `None` is used instead. The value is currently unused in the body,
        so behaviour is unchanged.
        """
        self.project.event(data=data, source=self)

    def preview(self, user: User) -> Job:
        """
        Generate a HTML preview of a source.

        Creates a `series` job comprising a `pull` job followed by a
        `convert` job.
        """
        preview = Job.objects.create(project=self.project,
                                     creator=user,
                                     method="series")
        preview.children.add(self.pull(user))
        preview.children.add(self.convert(user, to="html"))
        return preview

    # Properties related to jobs. Provide shortcuts for
    # obtaining info such as the files created in the last pull,
    # or the time of the last push

    @property
    def is_active(self) -> bool:
        """
        Is the source currently active.

        A source is considered active if it has an active job. The `since`
        parameter is included to ignore old, orphaned jobs which may have
        not have had their status updated.
        """
        since = datetime.timedelta(minutes=15)
        return (self.jobs.filter(
            is_active=True, created__gte=timezone.now() - since).count() > 0)

    def get_downstreams(self, current=True):
        """
        Get the downstream files.

        The equivalent of `File.get_downstreams()`.
        """
        return self.files.filter(current=current)

    def get_jobs(self, n=10) -> List[Job]:
        """
        Get the jobs associated with this source.
        """
        return self.jobs.order_by("-created").select_related("creator")[:n]

    def save(self, *args, **kwargs):
        """
        Save the source.

        An override to ensure necessary fields are set.
        """
        if not self.address:
            self.address = self.make_address()
        return super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """
        Delete the source.

        Override to unwatch a source before deleting it. This avoids the
        source provider continuing to send events for the source when it no
        longer exists.
        """
        if self.subscription:
            # Because we do not have the quest user here, use
            # the source creator for the unwatch and log any
            # exceptions as warnings only.
            try:
                self.unwatch(self.creator)
            except Exception as exc:
                logger.warning(str(exc), exc_info=True)
        return super().delete(*args, **kwargs)
class GeographicalArea(TrackedModel, ValidityMixin, DescribedMixin):
    """
    A Geographical Area covers three distinct types of object:

        1) A Country
        2) A Region (a trading area which is not recognised as a country)
        3) A Grouping of the above

    These objects are generally used when linked to data structures such as
    measures. As a measure does not care to distinguish between a country,
    region or group, the 3 types are stored as one for relational purposes.

    As a country or region can belong to a group there is a self-referential
    many-to-many field which is restricted. Yet groups can also have parent
    groups - in which case measures of the parent must also apply to a child.
    To accommodate this there is a separate foreign key for group to group
    relations.
    """

    # TARIC record/subrecord type identifiers for this model.
    record_code = "250"
    subrecord_code = "00"

    identifying_fields = ("sid",)

    url_pattern_name_prefix = "geo_area"

    sid = SignedIntSID(db_index=True)
    area_id = models.CharField(max_length=4, validators=[area_id_validator])
    area_code = models.PositiveSmallIntegerField(choices=AreaCode.choices)

    # This deals with countries and regions belonging to area groups
    memberships = models.ManyToManyField("self", through="GeographicalMembership")

    # This deals with subgroups of other groups
    parent = models.ForeignKey("self", on_delete=models.PROTECT, null=True, blank=True)

    objects = PolymorphicManager.from_queryset(GeographicalAreaQuerySet)()

    # Rules validated on other models that reference this one.
    indirect_business_rules = (
        business_rules.GA14,
        business_rules.GA16,
        business_rules.GA17,
        measures_business_rules.ME1,
        measures_business_rules.ME65,
        measures_business_rules.ME66,
        measures_business_rules.ME67,
        quotas_business_rules.ON13,
        quotas_business_rules.ON14,
        quotas_business_rules.ON6,
    )
    # Rules validated directly against this model.
    business_rules = (
        business_rules.GA1,
        business_rules.GA3,
        business_rules.GA4,
        business_rules.GA5,
        business_rules.GA6,
        business_rules.GA7,
        business_rules.GA10,
        business_rules.GA11,
        business_rules.GA21,
        business_rules.GA22,
        UniqueIdentifyingFields,
        UpdateValidity,
    )

    def get_current_memberships(self):
        """Return the current memberships in which this area participates,
        whether as the group or as the member."""
        return (
            GeographicalMembership.objects.filter(
                Q(geo_group__sid=self.sid) | Q(member__sid=self.sid),
            ).current().select_related("member", "geo_group")
        )

    def is_single_region_or_country(self):
        """True when this area is a single country or region (not a group)."""
        return self.area_code == AreaCode.COUNTRY or self.area_code == AreaCode.REGION

    def is_all_countries(self):
        # "1011" is presumably the TARIC "erga omnes" (all countries) group
        # id — confirm against the reference data.
        return self.area_code == AreaCode.GROUP and self.area_id == "1011"

    def is_group(self):
        return self.area_code == AreaCode.GROUP

    def __str__(self):
        return f"{self.get_area_code_display()} {self.area_id}"

    class Meta:
        constraints = (
            CheckConstraint(
                name="only_groups_have_parents",
                # area_code=1 appears to be AreaCode.GROUP (confirm): only
                # groups may have a parent; countries/regions must not.
                check=Q(area_code=1) | Q(parent__isnull=True),
            ),
        )
class MeasureCondition(TrackedModel):
    """
    A measure may be dependent on conditions.

    These are expressed in a series of conditions, each having zero or more
    components. Conditions for the same condition type will have sequence
    numbers. Conditions of different types may be combined.
    """

    # TARIC record/subrecord type identifiers for this model.
    record_code = "430"
    subrecord_code = "10"

    sid = SignedIntSID(db_index=True)
    dependent_measure = models.ForeignKey(
        Measure,
        on_delete=models.PROTECT,
        related_name="conditions",
    )
    condition_code = models.ForeignKey(
        MeasureConditionCode,
        on_delete=models.PROTECT,
        related_name="conditions",
    )
    component_sequence_number = models.PositiveSmallIntegerField(
        validators=[validators.validate_component_sequence_number],
    )
    duty_amount = models.DecimalField(
        max_digits=10,
        decimal_places=3,
        null=True,
        blank=True,
    )
    monetary_unit = models.ForeignKey(
        MonetaryUnit,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
    )
    condition_measurement = models.ForeignKey(
        Measurement,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
    )
    action = models.ForeignKey(
        MeasureAction,
        on_delete=models.PROTECT,
        null=True,
        blank=True,
    )
    required_certificate = models.ForeignKey(
        "certificates.Certificate",
        on_delete=models.PROTECT,
        null=True,
        blank=True,
    )

    objects = PolymorphicManager.from_queryset(MeasureConditionQuerySet)()

    # Rules validated on other models that reference this one.
    indirect_business_rules = (
        business_rules.MA2,
        business_rules.MC4,
        business_rules.ME53,
    )
    # Rules validated directly against this model.
    business_rules = (
        business_rules.MC3,
        business_rules.MA4,
        business_rules.ME56,
        business_rules.ME57,
        business_rules.ME58,
        business_rules.ME59,
        business_rules.ME60,
        business_rules.ME61,
        business_rules.ME62,
        business_rules.ME63,
        business_rules.ME64,
    )

    class Meta:
        ordering = [
            "dependent_measure__sid",
            "condition_code__code",
            "component_sequence_number",
        ]

    def is_certificate_required(self):
        # Condition-code types for which a certificate is expected to be
        # presented.
        return self.condition_code.code in ("A", "B", "C", "H", "Q", "Y", "Z")

    @property
    def description(self) -> str:
        """Build a human-readable sentence describing this condition from its
        code, certificate, reference price, action and applicable duty."""
        out: list[str] = []

        out.append(
            f"Condition of type {self.condition_code.code} - {self.condition_code.description}",
        )

        if self.required_certificate:
            out.append(
                f"On presentation of certificate {self.required_certificate.code},",
            )
        elif self.is_certificate_required():
            out.append("On presentation of no certificate,")

        # NOTE(review): `reference_price_string` and `duty_sentence` are not
        # defined in this class — presumably supplied by a mixin or queryset
        # annotation; confirm before relying on them here.
        if self.reference_price_string:
            out.append(f"If reference price > {self.reference_price_string},")

        out.append(f"perform action {self.action.code} - {self.action.description}")

        if self.condition_string:
            out.append(f"\n\nApplicable duty is {self.condition_string}")

        return " ".join(out)

    @property
    def condition_string(self) -> str:
        """Return the duty sentence for this condition's components, or an
        empty string when the components' measures make it ambiguous."""
        out: list[str] = []

        components = self.components.latest_approved()
        # Collect the distinct measures / measure types / additional codes
        # referenced by the condition's components.
        # NOTE(review): values added are `.sid` attributes, which look like
        # ints rather than strs — the set[str] annotations may be inaccurate;
        # confirm against the SID field types.
        measures: set[str] = set()
        measure_types: set[str] = set()
        additional_codes: set[str] = set()

        for mcc in components:
            measures.add(mcc.condition.dependent_measure.sid)
            measure_types.add(mcc.condition.dependent_measure.measure_type.sid)
            if mcc.condition.dependent_measure.additional_code:
                additional_codes.add(
                    mcc.condition.dependent_measure.additional_code.sid,
                )

        if (
            len(measures) == len(measure_types) == len(additional_codes) == 1
            or len(measure_types) > 1
            or len(additional_codes) > 1
        ):
            out.append(self.duty_sentence)

        return "".join(out)
class TrackedModel(PolymorphicModel):
    """
    Polymorphic base for versioned models.

    Every row belongs to a transaction and to a version group; new versions
    are authored via :meth:`new_draft` rather than updating rows in place
    (see :meth:`save`, which refuses writes to approved models).
    """

    # The transaction in which this version of the model was authored.
    transaction = models.ForeignKey(
        "common.Transaction",
        on_delete=models.PROTECT,
        related_name="tracked_models",
        editable=False,
    )

    update_type: validators.UpdateType = models.PositiveSmallIntegerField(
        choices=validators.UpdateType.choices,
        db_index=True,
    )
    """
    The change that was made to the model when this version of the model was
    authored.

    The first version should always have :data:`~validators.UpdateType.CREATE`,
    subsequent versions will have :data:`~validators.UpdateType.UPDATE` and the
    final version will have :data:`~validators.UpdateType.DELETE`.

    Deleted models that reappear for the same :attr:`identifying_fields` will
    have a new :attr:`version_group` created.
    """

    version_group = models.ForeignKey(
        VersionGroup,
        on_delete=models.PROTECT,
        related_name="versions",
    )
    """
    Each version group contains all of the versions of the same logical model.

    When a new version of a model is authored (e.g. to
    :data:`~validators.UpdateType.DELETE` it) a new model row is created and
    added to the same version group as the existing model being changed.

    Models are identified logically by their :attr:`identifying_fields`, so
    within one version group all of the models should have the same values for
    these fields.
    """

    objects: TrackedModelQuerySet = PolymorphicManager.from_queryset(
        TrackedModelQuerySet,
    )()

    # Business rules validated against this model directly, and those
    # validated via other models that reference it. Subclasses override.
    business_rules: Iterable = ()
    indirect_business_rules: Iterable = ()

    record_code: int
    """
    The type id of this model's type family in the TARIC specification.

    This number groups together a number of different models into 'records'.
    Where two models share a record code, they are conceptually expressing
    different properties of the same logical model. In theory each
    :class:`~common.transactions.Transaction` should only contain models with a
    single :attr:`record_code` (but differing :attr:`subrecord_code`.)
    """

    subrecord_code: int
    """
    The type id of this model in the TARIC specification.

    The :attr:`subrecord_code` when combined with the :attr:`record_code`
    uniquely identifies the type within the specification.

    The subrecord code gives the intended order for models in a transaction,
    with comparatively smaller subrecord codes needing to come before larger
    ones.
    """

    identifying_fields: Iterable[str] = ("sid",)
    """
    The fields which together form a composite unique key for each model.

    The system ID (or SID) field is normally the unique identifier of a TARIC
    model, but in places where this does not exist models can declare their
    own. (Note that because multiple versions of each model will exist this
    does not actually equate to a ``UNIQUE`` constraint in the database.)
    """

    # Optional explicit template path; when None, a name is derived from the
    # class name in get_taric_template().
    taric_template = None

    def get_taric_template(self):
        """
        Generate a TARIC XML template name for the given class.

        Any TrackedModel must be representable via a TARIC compatible XML
        record.

        :returns: The template path, e.g. ``taric/some_model.xml``.
        :raises loader.TemplateDoesNotExist: If no matching template can be
            loaded and :attr:`taric_template` is not set.
        """
        if self.taric_template:
            return self.taric_template

        class_name = self.__class__.__name__

        # Derive a snake_case template name from the CamelCase class name:
        # replace namesLikeThis to names_Like_This
        name = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", class_name)
        # replace names_LIKEthis to names_like_this
        name = re.sub(r"([A-Z]{2,})([a-z0-9_])", r"\1_\2", name).lower()

        template_name = f"taric/{name}.xml"

        # Verify the template actually exists, failing with guidance if not.
        try:
            loader.get_template(template_name)
        except loader.TemplateDoesNotExist as e:
            raise loader.TemplateDoesNotExist(
                f"Taric template does not exist for {class_name}. All classes that "
                "inherit TrackedModel must either:\n"
                " 1) Have a matching taric template with a snake_case name matching "
                'the class at "taric/{snake_case_class_name}.xml". In this case it '
                f'should be: "{template_name}".\n'
                " 2) A taric_template attribute, pointing to the correct template.\n"
                " 3) Override the get_taric_template method, returning an existing "
                "template.",
            ) from e

        return template_name

    def new_draft(self, workbasket, save=True, **kwargs):
        """
        Create a draft copy of this model as a new UPDATE version.

        Copies every concrete field except the pk, transaction and
        polymorphic bookkeeping fields; `kwargs` override copied values.
        Unless a transaction is supplied, a new one is created in
        `workbasket`.

        :param workbasket: Workbasket used to create a transaction when none
            is given in `kwargs`.
        :param save: When True (default), save the new object.
        :returns: The new draft instance.
        """
        cls = self.__class__
        new_object_kwargs = {
            field.name: getattr(self, field.name)
            for field in self._meta.fields
            if field.name
            not in (
                self._meta.pk.name,
                "transaction",
                "polymorphic_ctype",
                "trackedmodel_ptr",
                "id",
            )
        }
        new_object_kwargs["update_type"] = validators.UpdateType.UPDATE
        new_object_kwargs.update(kwargs)

        if "transaction" not in new_object_kwargs:
            # Only create a transaction if the user didn't specify one.
            new_object_kwargs["transaction"] = workbasket.new_transaction()

        new_object = cls(**new_object_kwargs)
        if save:
            new_object.save()

        return new_object

    def get_versions(self):
        """Return all versions of this logical model, preferring the version
        group when present and falling back to an identifying-fields query."""
        if hasattr(self, "version_group"):
            return self.version_group.versions.all()
        query = Q(**self.get_identifying_fields())
        return self.__class__.objects.filter(query)

    def get_description(self):
        """Return the latest description object, or None when there are none."""
        return self.get_descriptions().last()

    def get_descriptions(self, transaction=None) -> TrackedModelQuerySet:
        """
        Get the latest descriptions related to this instance of the Tracked
        Model.

        If there is no Description relation existing a `NoDescriptionError` is
        raised.

        If a transaction is provided then all latest descriptions that are
        either approved or in the workbasket of the transaction up to the
        transaction will be provided.
        """
        try:
            descriptions_model = self.descriptions.model
        except AttributeError as e:
            raise NoDescriptionError(
                f"Model {self.__class__.__name__} has no descriptions relation.",
            ) from e

        # Find the FK on the description model that points back at us.
        for field, model in descriptions_model.get_relations():
            if isinstance(self, model):
                field_name = field.name
                break
        else:
            raise NoDescriptionError(
                f"No foreign key back to model {self.__class__.__name__} "
                f"found on description model {descriptions_model.__name__}.",
            )

        # Filter descriptions by this model's identifying fields, traversed
        # through the back-reference FK.
        filter_kwargs = {
            f"{field_name}__{key}": value
            for key, value in self.get_identifying_fields().items()
        }

        query = descriptions_model.objects.filter(**filter_kwargs).order_by(
            "valid_between",
        )

        if transaction:
            return query.approved_up_to_transaction(transaction=transaction)
        return query.latest_approved()

    def identifying_fields_unique(
        self,
        identifying_fields: Optional[Iterable[str]] = None,
    ) -> bool:
        """True when at most one latest-approved row shares this model's
        identifying field values."""
        return (
            self.__class__.objects.filter(
                **self.get_identifying_fields(identifying_fields)
            )
            .latest_approved()
            .count()
            <= 1
        )

    def identifying_fields_to_string(
        self,
        identifying_fields: Optional[Iterable[str]] = None,
    ) -> str:
        """Render the identifying fields as ``field=value, ...`` for display."""
        field_list = [
            f"{field}={str(value)}"
            for field, value in self.get_identifying_fields(identifying_fields).items()
        ]

        return ", ".join(field_list)

    def _get_version_group(self) -> VersionGroup:
        # CREATEs start a fresh group; any other update joins the group of
        # the latest approved version.
        if self.update_type == validators.UpdateType.CREATE:
            return VersionGroup.objects.create()
        return self.get_versions().latest_approved().last().version_group

    def _can_write(self):
        # Unsaved rows are always writable; saved rows become read-only once
        # their workbasket is in an approved status.
        return not (
            self.pk
            and self.transaction.workbasket.status
            in WorkflowStatus.approved_statuses()
        )

    def get_identifying_fields(
        self,
        identifying_fields: Optional[Iterable[str]] = None,
    ) -> dict[str, Any]:
        """
        Return a mapping of identifying field name to current value.

        Supports ``__``-separated related lookups; traversal stops early at
        the first None encountered along the path.
        """
        identifying_fields = identifying_fields or self.identifying_fields
        fields = {}

        for field in identifying_fields:
            value = self
            for layer in field.split("__"):
                value = getattr(value, layer)
                if value is None:
                    break
            fields[field] = value

        return fields

    @property
    def structure_code(self):
        # Default display code is just the string form of the model.
        return str(self)

    @property
    def structure_description(self):
        """Best-effort display description: the latest related description,
        overridden by a local ``description`` attribute, else ``"-"``."""
        description = None
        if hasattr(self, "descriptions"):
            description = self.get_descriptions().last()
        if description:
            # Get the actual description, not just the object
            description = description.description
        if hasattr(self, "description"):
            description = self.description
        return description or "-"

    @property
    def current_version(self) -> TrackedModel:
        """The version group's current version; raises ``DoesNotExist`` when
        the group has none."""
        current_version = self.version_group.current_version
        if current_version is None:
            raise self.__class__.DoesNotExist("Object has no current version")
        return current_version

    @classmethod
    def get_relations(cls) -> list[tuple[Field, type[TrackedModel]]]:
        """Find all foreign key and one-to-one relations on an object and
        return a list containing tuples of the field instance and the related
        model it links to."""
        return [
            (f, f.related_model)
            for f in cls._meta.get_fields()
            if (f.many_to_one or f.one_to_one)
            and not f.auto_created
            and f.concrete
            and f.model == cls
            and issubclass(f.related_model, TrackedModel)
        ]

    def __getattr__(self, item: str):
        """
        Add the ability to get the current instance of a related object through
        an attribute. For example if a model is like so:

        .. code:: python

            class ExampleModel(TrackedModel):
                # must be a TrackedModel
                other_model = models.ForeignKey(OtherModel, on_delete=models.PROTECT)

        The latest version of the relation can be accessed via:

        .. code:: python

            example_model = ExampleModel.objects.first()
            example_model.other_model_current  # Gets the latest version
        """
        if item.endswith("_current"):
            # Strip the "_current" suffix (8 characters) to recover the
            # underlying relation field name.
            field_name = item[:-8]
            if field_name in [field.name for field, _ in self.get_relations()]:
                return getattr(self, field_name).current_version
        return self.__getattribute__(item)

    @atomic
    def save(self, *args, force_write=False, **kwargs):
        """
        Save the model, enforcing the versioning workflow.

        :param force_write: Bypass the approved-model write guard.
        :raises IllegalSaveError: If the model is already approved and
            `force_write` is not set.
        """
        if not force_write and not self._can_write():
            raise IllegalSaveError(
                "TrackedModels cannot be updated once written and approved. "
                "If writing a new row, use `.new_draft` instead",
            )

        if not hasattr(self, "version_group"):
            self.version_group = self._get_version_group()

        return_value = super().save(*args, **kwargs)

        # Approved saves immediately become the group's current version.
        if self.transaction.workbasket.status in WorkflowStatus.approved_statuses():
            self.version_group.current_version = self
            self.version_group.save()

        return return_value

    def __str__(self):
        return ", ".join(
            f"{field}={getattr(self, field, None)}"
            for field in self.identifying_fields
        )

    def __hash__(self):
        # NOTE(review): hashes only the module and class name, so every
        # instance of a class shares one hash value (legal but degrades
        # dict/set performance); confirm this interplay with __eq__ is
        # intended.
        return hash(f"{__name__}.{self.__class__.__name__}")

    def get_url(self, action="detail"):
        """
        Reverse the UI URL for this model and `action`.

        List views take no kwargs; other actions use the identifying fields.
        Returns None when no matching URL pattern exists.
        """
        kwargs = {}
        if action != "list":
            kwargs = self.get_identifying_fields()
        try:
            return reverse(
                f"{self.get_url_pattern_name_prefix()}-ui-{action}",
                kwargs=kwargs,
            )
        except NoReverseMatch:
            return

    def get_url_pattern_name_prefix(self):
        """URL pattern prefix: `url_pattern_name_prefix` when declared,
        otherwise the verbose name with spaces replaced by underscores."""
        prefix = getattr(self, "url_pattern_name_prefix", None)
        if not prefix:
            prefix = self._meta.verbose_name.replace(" ", "_")
        return prefix