class Migration(migrations.Migration):

    dependencies = [
        ('django_analyses', '0010_auto_20210101_1614'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='analysisversion',
            options={'ordering': ('analysis', '-title')},
        ),
        migrations.AlterField(
            model_name='analysisversion',
            name='fixed_run_method_kwargs',
            field=models.JSONField(
                default=dict,
                help_text='Fixed run method keyword arguments'),
        ),
        migrations.AlterField(
            model_name='analysisversion',
            name='max_parallel',
            field=models.PositiveIntegerField(
                default=4,
                help_text='Maximal number of parallel executions'),
        ),
        migrations.AlterField(
            model_name='analysisversion',
            name='nested_results_attribute',
            field=models.CharField(
                blank=True,
                help_text='Name of an attribute to be returned or called in order to retrieve the output dictionary',
                max_length=100,
                null=True),
        ),
        migrations.AlterField(
            model_name='analysisversion',
            name='run_method_key',
            field=models.CharField(
                default='run',
                help_text='Custom run method name',
                max_length=100),
        ),
        migrations.AlterField(
            model_name='inputdefinition',
            name='db_value_preprocessing',
            field=models.CharField(
                blank=True,
                help_text="Calls or returns the specified input value's attribute when saved to the database",
                max_length=255,
                null=True,
                verbose_name='DB Value Preprocessing'),
        ),
        migrations.AlterField(
            model_name='inputdefinition',
            name='is_configuration',
            field=models.BooleanField(
                default=True,
                help_text='Whether this definition represents a configuration of the analysis (rather than data input)'),
        ),
        migrations.AlterField(
            model_name='inputdefinition',
            name='run_method_input',
            field=models.BooleanField(
                default=False,
                help_text='Pass this input when calling the run method (and not at interface initialization)'),
        ),
        migrations.AlterField(
            model_name='inputdefinition',
            name='value_attribute',
            field=models.CharField(
                blank=True,
                help_text="Calls or returns the specified input value's attribute when passed to the interface",
                max_length=255,
                null=True),
        ),
    ]
class ConfigContextModel(models.Model, ConfigContextSchemaValidationMixin): """ A model which includes local configuration context data. This local data will override any inherited data from ConfigContexts. """ local_context_data = models.JSONField( encoder=DjangoJSONEncoder, blank=True, null=True, ) local_context_schema = models.ForeignKey( to="extras.ConfigContextSchema", on_delete=models.SET_NULL, null=True, blank=True, help_text="Optional schema to validate the structure of the data", ) # The local context data *may* be owned by another model, such as a GitRepository, or it may be un-owned local_context_data_owner_content_type = models.ForeignKey( to=ContentType, on_delete=models.CASCADE, limit_choices_to=FeatureQuery("config_context_owners"), default=None, null=True, blank=True, ) local_context_data_owner_object_id = models.UUIDField(default=None, null=True, blank=True) local_context_data_owner = GenericForeignKey( ct_field="local_context_data_owner_content_type", fk_field="local_context_data_owner_object_id", ) class Meta: abstract = True def get_config_context(self): """ Return the rendered configuration context for a device or VM. """ # always manually query for config contexts config_context_data = ConfigContext.objects.get_for_object(self).values_list("data", flat=True) # Compile all config data, overwriting lower-weight values with higher-weight values where a collision occurs data = OrderedDict() for context in config_context_data: data = deepmerge(data, context) # If the object has local config context data defined, merge it last if self.local_context_data: data = deepmerge(data, self.local_context_data) return data def clean(self): super().clean() # Verify that JSON data is provided as an object if self.local_context_data and type(self.local_context_data) is not dict: raise ValidationError({"local_context_data": 'JSON data must be in object form. Example: {"foo": 123}'}) if self.local_context_schema and not self.local_context_data: raise ValidationError({"local_context_schema": "Local context data must exist for a schema to be applied."}) # Validate data against schema self._validate_with_schema("local_context_data", "local_context_schema")
class ConfigContext(BaseModel, ChangeLoggedModel, ConfigContextSchemaValidationMixin): """ A ConfigContext represents a set of arbitrary data available to any Device or VirtualMachine matching its assigned qualifiers (region, site, etc.). For example, the data stored in a ConfigContext assigned to site A and tenant B will be available to a Device in site A assigned to tenant B. Data is stored in JSON format. """ name = models.CharField( max_length=100, ) # A ConfigContext *may* be owned by another model, such as a GitRepository, or it may be un-owned owner_content_type = models.ForeignKey( to=ContentType, on_delete=models.CASCADE, limit_choices_to=FeatureQuery("config_context_owners"), default=None, null=True, blank=True, ) owner_object_id = models.UUIDField(default=None, null=True, blank=True) owner = GenericForeignKey( ct_field="owner_content_type", fk_field="owner_object_id", ) weight = models.PositiveSmallIntegerField(default=1000) description = models.CharField(max_length=200, blank=True) is_active = models.BooleanField( default=True, ) schema = models.ForeignKey( to="extras.ConfigContextSchema", on_delete=models.SET_NULL, null=True, blank=True, help_text="Optional schema to validate the structure of the data", ) regions = models.ManyToManyField(to="dcim.Region", related_name="+", blank=True) sites = models.ManyToManyField(to="dcim.Site", related_name="+", blank=True) roles = models.ManyToManyField(to="dcim.DeviceRole", related_name="+", blank=True) device_types = models.ManyToManyField(to="dcim.DeviceType", related_name="+", blank=True) platforms = models.ManyToManyField(to="dcim.Platform", related_name="+", blank=True) cluster_groups = models.ManyToManyField(to="virtualization.ClusterGroup", related_name="+", blank=True) clusters = models.ManyToManyField(to="virtualization.Cluster", related_name="+", blank=True) tenant_groups = models.ManyToManyField(to="tenancy.TenantGroup", related_name="+", blank=True) tenants = models.ManyToManyField(to="tenancy.Tenant", related_name="+", blank=True) tags = models.ManyToManyField(to="extras.Tag", related_name="+", blank=True) data = models.JSONField(encoder=DjangoJSONEncoder) objects = ConfigContextQuerySet.as_manager() class Meta: ordering = ["weight", "name"] unique_together = [["name", "owner_content_type", "owner_object_id"]] def __str__(self): if self.owner: return f"[{self.owner}] {self.name}" return self.name def get_absolute_url(self): return reverse("extras:configcontext", kwargs={"pk": self.pk}) def clean(self): super().clean() # Verify that JSON data is provided as an object if type(self.data) is not dict: raise ValidationError({"data": 'JSON data must be in object form. Example: {"foo": 123}'}) # Validate data against schema self._validate_with_schema("data", "schema") # Check for a duplicated `name`. This is necessary because Django does not consider two NULL fields to be equal, # and thus if the `owner` is NULL, a duplicate `name` will not otherwise automatically raise an exception. if ( ConfigContext.objects.exclude(pk=self.pk) .filter(name=self.name, owner_content_type=self.owner_content_type, owner_object_id=self.owner_object_id) .exists() ): raise ValidationError({"name": "A ConfigContext with this name already exists."})
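# Hedged usage sketch (not part of the original source): assuming `device` is a Device
# matching both contexts below, and assuming the default queryset returns contexts in
# ascending weight order (as the merge comment in get_config_context() implies), the
# higher-weight context wins on key collisions and local_context_data is merged last.
low = ConfigContext.objects.create(name="global-dns", weight=500, data={"dns": ["1.1.1.1"]})
high = ConfigContext.objects.create(name="site-dns", weight=2000, data={"dns": ["10.0.0.53"]})
low.sites.set([device.site])
high.sites.set([device.site])
device.local_context_data = {"ntp": ["10.0.0.123"]}
rendered = device.get_config_context()
# rendered == {"dns": ["10.0.0.53"], "ntp": ["10.0.0.123"]}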
class Entity(models.Model):
    rating_total_score = models.PositiveIntegerField(null=True, blank=True)
    rating_number = models.PositiveIntegerField(null=True, blank=True)
    rating = models.DecimalField(null=True, blank=True, max_digits=3, decimal_places=1)
    created_time = models.DateTimeField(auto_now_add=True)
    edited_time = models.DateTimeField(auto_now=True)
    last_editor = models.ForeignKey(
        User, on_delete=models.SET_NULL,
        related_name='%(class)s_last_editor', null=True, blank=False)
    brief = models.TextField(_("简介"), blank=True, default="")
    other_info = models.JSONField(
        _("其他信息"), blank=True, null=True, encoder=DjangoJSONEncoder, default=dict)
    # source_url should include the scheme, which is normally https://
    source_url = models.URLField(_("URL"), max_length=500, unique=True)
    source_site = models.CharField(_("源网站"), choices=SourceSiteEnum.choices, max_length=50)

    class Meta:
        abstract = True
        constraints = [
            models.CheckConstraint(check=models.Q(rating__gte=0), name='%(class)s_rating_lowerbound'),
            models.CheckConstraint(check=models.Q(rating__lte=10), name='%(class)s_rating_upperbound'),
        ]

    def get_absolute_url(self):
        raise NotImplementedError("Subclass should implement this method")

    def save(self, *args, **kwargs):
        """
        Update the rating and strip the source URL scheme & querystring before saving to the db.
        """
        if self.rating_number and self.rating_total_score:
            self.rating = Decimal(
                str(round(self.rating_total_score / self.rating_number, 1)))
        elif self.rating_number is None and self.rating_total_score is None:
            self.rating = None
        else:
            raise IntegrityError()
        super().save(*args, **kwargs)

    def calculate_rating(self, old_rating, new_rating):
        if (not (self.rating and self.rating_total_score and self.rating_number)
                and (self.rating or self.rating_total_score or self.rating_number)) \
                or (not (self.rating or self.rating_number or self.rating_total_score)
                    and old_rating is not None):
            raise IntegrityError("Rating integrity error.")
        if old_rating:
            if new_rating:
                # old -> new
                self.rating_total_score += (new_rating - old_rating)
            else:
                # old -> none
                if self.rating_number >= 2:
                    self.rating_total_score -= old_rating
                    self.rating_number -= 1
                else:
                    # only one rating record
                    self.rating_number = None
                    self.rating_total_score = None
        else:
            if new_rating:
                # none -> new
                if self.rating_number and self.rating_number >= 1:
                    self.rating_total_score += new_rating
                    self.rating_number += 1
                else:
                    # no rating record before
                    self.rating_number = 1
                    self.rating_total_score = new_rating
            else:
                # none -> none
                pass

    def update_rating(self, old_rating, new_rating):
        """
        @param old_rating: the old mark rating
        @param new_rating: the new mark rating
        """
        self.calculate_rating(old_rating, new_rating)
        self.save()

    def get_tags_manager(self):
        """
        Since the relation between tag and entity is a foreign key and the related
        name has to be unique, this method works like an interface.
        """
        raise NotImplementedError("Subclass should implement this method.")

    def get_marks_manager(self):
        """
        Normally this won't be used. There is no occasion where a visitor can
        simply view all the marks.
        """
        raise NotImplementedError("Subclass should implement this method.")

    def get_reviews_manager(self):
        """
        Normally this won't be used. There is no occasion where a visitor can
        simply view all the reviews.
        """
        raise NotImplementedError("Subclass should implement this method.")

    @classmethod
    def get_category_mapping_dict(cls):
        category_mapping_dict = {}
        for subclass in cls.__subclasses__():
            category_mapping_dict[subclass.__name__.lower()] = subclass
        return category_mapping_dict

    @property
    def category_name(self):
        return self.__class__.__name__

    @property
    def verbose_category_name(self):
        raise NotImplementedError("Subclass should implement this.")
class Weather(models.Model):
    """
    Weather Model

    Defines the attributes of a weather report.
    """
    location_name = models.CharField(max_length=255)
    temperature = models.CharField(max_length=255)
    wind = models.CharField(max_length=255)
    cloudiness = models.CharField(max_length=255)
    pressure = models.CharField(max_length=255)
    humidity = models.CharField(max_length=255)
    sunrise = models.CharField(max_length=255)
    sunset = models.CharField(max_length=255)
    geo_coordinates = models.CharField(max_length=255)
    requested_time = models.CharField(max_length=255)
    forecast = models.JSONField()

    def __init__(self, *args, **kwargs):
        """
        Init method used to extract data from the OpenWeather response and
        transform it into the required Weather model fields.
        """
        kwargs = self.__extract_transform(**kwargs)
        super().__init__(*args, **kwargs)

    def __extract_transform(self, **kwargs):
        sys = kwargs.get('sys', {})
        main = kwargs.get('main', {})
        timezone = kwargs.get('timezone', 0)
        coord = kwargs.get('coord', {})
        return {
            'location_name': '{0}, {1}'.format(kwargs.get('name', ''), sys.get('country', '')),
            'temperature': '{0} °C'.format(main.get('temp', '')),
            'wind': get_wind(wind=kwargs.get('wind', '')),
            'cloudiness': kwargs.get('weather', [{}])[0].get('description', '').capitalize(),
            'pressure': '{} hPa'.format(main.get('pressure', '')),
            'humidity': '{}%'.format(main.get('humidity', '')),
            'sunrise': get_hour(utc_time=sys.get('sunrise', 0), timezone=timezone),
            'sunset': get_hour(utc_time=sys.get('sunset', 0), timezone=timezone),
            'geo_coordinates': '[{}, {}]'.format(round(coord.get('lat', 0), 2), round(coord.get('lon', 0), 2)),
            'requested_time': get_date(utc_time=kwargs.get('dt', 0), timezone=timezone)
        }

    def save(self, *args, **kwargs):
        """
        Override the model's save method to prevent saving to the database.
        """
        pass

    class Meta:
        managed = False
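# Hedged usage sketch (not part of the original source): a trimmed OpenWeather
# "current weather" payload is passed straight to the unmanaged Weather model, which
# reshapes it in __init__; save() is a no-op so nothing touches the database. The
# helper functions get_wind/get_hour/get_date are assumed to accept these raw values.
payload = {
    "name": "Bogota", "timezone": -18000, "dt": 1485789600,
    "sys": {"country": "CO", "sunrise": 1485762037, "sunset": 1485794875},
    "main": {"temp": 17, "pressure": 1027, "humidity": 63},
    "wind": {"speed": 1.5, "deg": 260},
    "weather": [{"description": "light rain"}],
    "coord": {"lon": -74.0817, "lat": 4.6097},
}
weather = Weather(**payload)
# weather.location_name == "Bogota, CO"; weather.pressure == "1027 hPa"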
class Application(CommonModelMixin, OrgModelMixin):
    name = models.CharField(max_length=128, verbose_name=_('Name'))
    category = models.CharField(
        max_length=16, choices=const.ApplicationCategoryChoices.choices,
        verbose_name=_('Category'))
    type = models.CharField(
        max_length=16, choices=const.ApplicationTypeChoices.choices,
        verbose_name=_('Type'))
    domain = models.ForeignKey(
        'assets.Domain', null=True, blank=True,
        related_name='applications', on_delete=models.SET_NULL,
        verbose_name=_("Domain"),
    )
    attrs = models.JSONField(default=dict, verbose_name=_('Attrs'))
    comment = models.TextField(
        max_length=128, default='', blank=True, verbose_name=_('Comment'))

    class Meta:
        unique_together = [('org_id', 'name')]
        ordering = ('name',)

    def __str__(self):
        category_display = self.get_category_display()
        type_display = self.get_type_display()
        return f'{self.name}({type_display})[{category_display}]'

    @property
    def category_remote_app(self):
        return self.category == const.ApplicationCategoryChoices.remote_app.value

    def get_rdp_remote_app_setting(self):
        from applications.serializers.attrs import get_serializer_class_by_application_type
        if not self.category_remote_app:
            raise ValueError(f"Not a remote app application: {self.name}")
        serializer_class = get_serializer_class_by_application_type(self.type)
        fields = serializer_class().get_fields()

        parameters = [self.type]
        for field_name in list(fields.keys()):
            if field_name in ['asset']:
                continue
            value = self.attrs.get(field_name)
            if not value:
                continue
            if field_name == 'path':
                value = '\"%s\"' % value
            parameters.append(str(value))

        parameters = ' '.join(parameters)
        return {
            'program': '||jmservisor',
            'working_directory': '',
            'parameters': parameters
        }

    def get_remote_app_asset(self):
        asset_id = self.attrs.get('asset')
        if not asset_id:
            raise ValueError("Remote App has no asset attr")
        asset = Asset.objects.filter(id=asset_id).first()
        return asset
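# Hedged usage sketch (not part of the original source): for a remote-app application
# whose attrs serializer is assumed to expose `asset`, `path` and `chrome_target`
# fields, the RDP setting is built from the non-empty attrs in field order, with the
# `asset` field skipped and the `path` value quoted.
app = Application(
    name="chrome-demo", category="remote_app", type="chrome",
    attrs={"asset": "<asset-uuid>",
           "path": "C:\\Program Files\\Chrome\\chrome.exe",
           "chrome_target": "https://example.com"},
)
app.get_rdp_remote_app_setting()
# {'program': '||jmservisor', 'working_directory': '',
#  'parameters': 'chrome "C:\\Program Files\\Chrome\\chrome.exe" https://example.com'}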
class Migration(migrations.Migration): initial = True dependencies = [ ("sessions", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("games", "0001_initial"), ("auth", "0012_alter_user_first_name_max_length"), ] operations = [ migrations.CreateModel( name="GameMembership", fields=[ ( "id", models.BigAutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "date_created", models.DateTimeField( default=django.utils.timezone.now, help_text="The datetime the user created an account for the game.", ), ), ( "last_updated", models.DateTimeField( default=django.utils.timezone.now, help_text="The last datetime the user played the game.", ), ), ( "game", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="memberships", to="games.game", ), ), ], ), migrations.CreateModel( name="Mail", fields=[ ( "id", models.BigAutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "date_created", models.DateTimeField( default=django.utils.timezone.now, help_text="The datetime this mail was created.", ), ), ("title", models.CharField(default="", max_length=128)), ("content", models.TextField(default="", max_length=4096)), ( "from_user", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="sent_mail", to=settings.AUTH_USER_MODEL, ), ), ], ), migrations.CreateModel( name="UserProfile", fields=[ ( "user", models.OneToOneField( on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to="auth.user", ), ), ( "last_updated", models.DateTimeField( default=django.utils.timezone.now, help_text="The last datetime of any user activity.", ), ), ("about", models.TextField(blank=True, default="", max_length=4096)), ("title", models.CharField(default="Initiate", max_length=256)), ( "settings", models.JSONField( blank=True, default=dict, help_text="User profile in JSON or YAML format.", ), ), ( "color_theme", models.CharField( choices=[ ("#173F5F", "p1_darkblue"), ("#20639B", "p1_blue"), ("#3CAEA3", "p1_green"), ("#F6D55C", "p1_yellow"), ("#ED553B", "p1_red"), ], default="#173F5F", max_length=16, ), ), ( "games", models.ManyToManyField( through="user_profile.GameMembership", to="games.Game" ), ), ("mail", models.ManyToManyField(to="user_profile.Mail")), ], ), migrations.CreateModel( name="UserRelationship", fields=[ ( "id", models.BigAutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "date_created", models.DateTimeField( default=django.utils.timezone.now, help_text="The datetime this connection was created.", ), ), ( "relationship_type", models.CharField( choices=[("Friend", "Friend")], default="Friend", help_text="Type of the connection (Friend, ...)", max_length=256, ), ), ( "relationship_state", models.CharField( choices=[("Request", "Request"), ("Accepted", "Accepted")], default="Request", help_text="State of the connection (Accepted, Request, ...)", max_length=256, ), ), ( "other_profile", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="user_profile.userprofile", ), ), ( "user_profile", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="user_relationships", to="user_profile.userprofile", ), ), ], ), migrations.AddField( model_name="userprofile", name="relationships", field=models.ManyToManyField( related_name="_user_profile_userprofile_relationships_+", through="user_profile.UserRelationship", to="user_profile.UserProfile", ), ), migrations.AddField( 
model_name="userprofile", name="session", field=models.ForeignKey( null=True, on_delete=django.db.models.deletion.SET_NULL, to="sessions.session", ), ), migrations.AddField( model_name="mail", name="user_profile", field=models.ForeignKey( blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name="received_mail", to="user_profile.userprofile", ), ), migrations.AddField( model_name="gamemembership", name="profile", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="memberships", to="user_profile.userprofile", ), ), ]
class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Client', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('photo', models.ImageField(blank=True, null=True, upload_to='images/client_profiles')), ('type', models.CharField(choices=[('pers', 'Person'), ('comp', 'Company')], max_length=4)), ('is_vip', models.BooleanField(default=False)), ('want_ads', models.BooleanField(default=True)), ('source', models.CharField(max_length=100)), ('comments', models.JSONField(blank=True, null=True)), ('address', models.CharField(blank=True, max_length=500, null=True)), ('postal_address', models.CharField(blank=True, max_length=500, null=True)), ('phone', models.CharField(blank=True, max_length=50, null=True)), ('fax', models.CharField(blank=True, max_length=50, null=True)), ('email', models.EmailField(blank=True, max_length=254, null=True)), ('socials', models.JSONField(blank=True, null=True)), ('website', models.URLField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('manager', models.ForeignKey( on_delete=django.db.models.deletion.RESTRICT, related_name='clients', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='ClientStatus', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=70)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('photo', models.ImageField(blank=True, null=True, upload_to='images/client_profiles')), ('position', models.CharField(max_length=150)), ('comments', models.JSONField(blank=True, null=True)), ('address', models.CharField(blank=True, max_length=500, null=True)), ('postal_address', models.CharField(blank=True, max_length=500, null=True)), ('phone', models.CharField(blank=True, max_length=50, null=True)), ('fax', models.CharField(blank=True, max_length=50, null=True)), ('email', models.EmailField(blank=True, max_length=254, null=True)), ('socials', models.JSONField(blank=True, null=True)), ('website', models.URLField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='clients.client')), ], options={ 'ordering': ['name'], }, ), migrations.AddField( model_name='client', name='status', field=models.ForeignKey( null=True, on_delete=django.db.models.deletion.SET_NULL, to='clients.clientstatus'), ), ]
def test_validation_error(self):
    field = models.JSONField()
    msg = 'Value must be valid JSON.'
    value = uuid.UUID('{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}')
    with self.assertRaisesMessage(ValidationError, msg):
        field.clean({'uuid': value}, None)
class InstrumentInfo(models.Model):
    experiment = models.ForeignKey(Experiments, on_delete=models.CASCADE)
    data_json = models.JSONField()

    class Meta:
        db_table = 'instrument_info'
class Message(models.Model): DATA = 'data' NOTIFICATION = 'notification' MESSAGE_TYPES = [(DATA, 'Data'), (NOTIFICATION, 'Notification')] uuid = models.UUIDField(default=uuid.uuid4, unique=True, editable=False) message_type = models.CharField(max_length=20, choices=MESSAGE_TYPES) recipient = models.ForeignKey(User, on_delete=models.CASCADE) device = models.ForeignKey(Device, null=True, on_delete=models.CASCADE) data = models.JSONField(null=True) content = models.TextField(null=True) title = models.TextField(max_length=150, null=True) body = models.TextField(max_length=355, null=True) collapse_subject = models.CharField(max_length=150, null=True) external_id = models.CharField(max_length=150, null=True) created = models.DateTimeField(auto_now_add=True) objects = MessageReceiptQuerySet.as_manager() def __str__(self): return '%s (%s)' % (self.recipient.username, self.message_type) def __load_message_receipts(self): message_receipts = {} for _receipt in MessageReceipt.objects.filter(message=self): message_receipts[_receipt.type] = _receipt.time self._message_receipts = message_receipts def __get_receipt_time(self, receipt_type): message_receipts = self.get_message_receipts() if receipt_type in message_receipts: return self._message_receipts[receipt_type] else: return None def update_message_receipts(self): if self.device.type != 'onesignal': raise RuntimeError('No client for message device') client = OneSignalClient(self.device) receipts = client.get_message_receipts(self.external_id) for receipt_type, receipt_datetime in receipts.items(): try: receipt = MessageReceipt.objects.get(message=self, type=receipt_type) except MessageReceipt.DoesNotExist: receipt = MessageReceipt(message=self, type=receipt_type) receipt.time = receipt_datetime receipt.save() def get_message_receipts(self): if not hasattr(self, '_message_receipts'): self.__load_message_receipts() return self._message_receipts def set_message_receipts(self, message_receipts): self._message_receipts = message_receipts @property def sent(self): return self.__get_receipt_time(MessageReceipt.SENT) @property def received(self): return self.__get_receipt_time(MessageReceipt.RECEIVED) @property def displayed(self): return self.__get_receipt_time(MessageReceipt.DISPLAYED) @property def opened(self): return self.__get_receipt_time(MessageReceipt.OPENED) @property def engaged(self): return self.__get_receipt_time(MessageReceipt.ENGAGED)
class Migration(migrations.Migration): initial = True dependencies = [ ('system', '0001_initial'), ] operations = [ migrations.CreateModel( name='Professor', fields=[ ], options={ 'proxy': True, 'indexes': [], 'constraints': [], }, bases=('system.user',), ), migrations.CreateModel( name='ProfessorFields', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('is_adminstrator', models.BooleanField(default=False)), ('grade', models.CharField(choices=[('0', 'Instructor'), ('1', 'Assistant'), ('2', 'Associate'), ('3', 'Professor')], max_length=10)), ('last_university', models.CharField(max_length=100)), ('group', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='system.group')), ('professor', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='professor.professor')), ], ), migrations.CreateModel( name='PHDRequest', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('request_id', models.CharField(blank=True, max_length=10)), ('status', models.IntegerField(choices=[(0, 'Rejected'), (1, 'Accepted'), (2, 'In Progress')], default=2)), ('clause_a', models.JSONField()), ('clause_b', models.CharField(max_length=20)), ('created_at', models.DateField(auto_now_add=True)), ('manager_ok', models.BooleanField(default=False)), ('staff_ok', models.BooleanField(default=False)), ('professor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='professor.professor')), ], ), migrations.CreateModel( name='MasterRequest', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('request_id', models.CharField(blank=True, max_length=10)), ('status', models.IntegerField(choices=[(0, 'Rejected'), (1, 'Accepted'), (2, 'In Progress')], default=2)), ('type_1', models.SmallIntegerField()), ('type_2', models.SmallIntegerField()), ('created_at', models.DateField(auto_now_add=True)), ('manager_ok', models.BooleanField(default=False)), ('staff_ok', models.BooleanField(default=False)), ('orientation', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='system.orientation')), ('professor', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='professor.professor')), ], ), ]
class Migration(migrations.Migration): dependencies = [ ("posthog", "0143_user_uuid"), ] operations = [ migrations.AlterField( model_name="actionstep", name="properties", field=models.JSONField(blank=True, default=list, null=True), ), migrations.AlterField(model_name="cohort", name="groups", field=models.JSONField(default=list),), migrations.AlterField(model_name="dashboard", name="filters", field=models.JSONField(default=dict),), migrations.AlterField(model_name="dashboarditem", name="filters", field=models.JSONField(default=dict),), migrations.AlterField(model_name="dashboarditem", name="layouts", field=models.JSONField(default=dict),), migrations.AlterField(model_name="element", name="attributes", field=models.JSONField(default=dict),), migrations.AlterField( model_name="event", name="elements", field=models.JSONField(blank=True, default=list, null=True), ), migrations.AlterField(model_name="event", name="properties", field=models.JSONField(default=dict),), migrations.AlterField(model_name="featureflag", name="filters", field=models.JSONField(default=dict),), migrations.AlterField( model_name="organization", name="personalization", field=models.JSONField(blank=True, default=dict), ), migrations.AlterField(model_name="person", name="properties", field=models.JSONField(default=dict),), migrations.AlterField(model_name="plugin", name="config_schema", field=models.JSONField(default=dict),), migrations.AlterField(model_name="plugin", name="error", field=models.JSONField(default=None, null=True),), migrations.AlterField(model_name="pluginconfig", name="config", field=models.JSONField(default=dict),), migrations.AlterField( model_name="pluginconfig", name="error", field=models.JSONField(default=None, null=True), ), migrations.AlterField( model_name="sessionrecordingevent", name="snapshot_data", field=models.JSONField(default=dict), ), migrations.AlterField(model_name="sessionsfilter", name="filters", field=models.JSONField(default=dict),), migrations.AlterField( model_name="team", name="data_attributes", field=models.JSONField(default=posthog.models.team.get_default_data_attributes), ), migrations.AlterField(model_name="team", name="event_names", field=models.JSONField(default=list),), migrations.AlterField(model_name="team", name="event_names_with_usage", field=models.JSONField(default=list),), migrations.AlterField(model_name="team", name="event_properties", field=models.JSONField(default=list),), migrations.AlterField( model_name="team", name="event_properties_numerical", field=models.JSONField(default=list), ), migrations.AlterField( model_name="team", name="event_properties_with_usage", field=models.JSONField(default=list), ), migrations.AlterField(model_name="team", name="test_account_filters", field=models.JSONField(default=list),), migrations.AlterField( model_name="user", name="first_name", field=models.CharField(blank=True, max_length=150, verbose_name="first name"), ), ]
class UserConfig(models.Model): """ This model stores arbitrary user-specific preferences in a JSON data structure. """ user = models.OneToOneField(to=User, on_delete=models.CASCADE, related_name='config') data = models.JSONField(default=dict) class Meta: ordering = ['user'] verbose_name = verbose_name_plural = 'User Preferences' def get(self, path, default=None): """ Retrieve a configuration parameter specified by its dotted path. Example: userconfig.get('foo.bar.baz') :param path: Dotted path to the configuration key. For example, 'foo.bar' returns self.data['foo']['bar']. :param default: Default value to return for a nonexistent key (default: None). """ d = self.data keys = path.split('.') # Iterate down the hierarchy, returning the default value if any invalid key is encountered for key in keys: if type(d) is dict and key in d: d = d.get(key) else: return default return d def all(self): """ Return a dictionary of all defined keys and their values. """ return flatten_dict(self.data) def set(self, path, value, commit=False): """ Define or overwrite a configuration parameter. Example: userconfig.set('foo.bar.baz', 123) Leaf nodes (those which are not dictionaries of other nodes) cannot be overwritten as dictionaries. Similarly, branch nodes (dictionaries) cannot be overwritten as single values. (A TypeError exception will be raised.) In both cases, the existing key must first be cleared. This safeguard is in place to help avoid inadvertently overwriting the wrong key. :param path: Dotted path to the configuration key. For example, 'foo.bar' sets self.data['foo']['bar']. :param value: The value to be written. This can be any type supported by JSON. :param commit: If true, the UserConfig instance will be saved once the new value has been applied. """ d = self.data keys = path.split('.') # Iterate through the hierarchy to find the key we're setting. Raise TypeError if we encounter any # interim leaf nodes (keys which do not contain dictionaries). for i, key in enumerate(keys[:-1]): if key in d and type(d[key]) is dict: d = d[key] elif key in d: err_path = '.'.join(path.split('.')[:i + 1]) raise TypeError( f"Key '{err_path}' is a leaf node; cannot assign new keys") else: d = d.setdefault(key, {}) # Set a key based on the last item in the path. Raise TypeError if attempting to overwrite a non-leaf node. key = keys[-1] if key in d and type(d[key]) is dict: raise TypeError( f"Key '{path}' has child keys; cannot assign a value") else: d[key] = value if commit: self.save() def clear(self, path, commit=False): """ Delete a configuration parameter specified by its dotted path. The key and any child keys will be deleted. Example: userconfig.clear('foo.bar.baz') Invalid keys will be ignored silently. :param path: Dotted path to the configuration key. For example, 'foo.bar' deletes self.data['foo']['bar']. :param commit: If true, the UserConfig instance will be saved once the new value has been applied. """ d = self.data keys = path.split('.') for key in keys[:-1]: if key not in d: break if type(d[key]) is dict: d = d[key] key = keys[-1] d.pop(key, None) # Avoid a KeyError on invalid keys if commit: self.save()
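# Hedged usage sketch (not part of the original source): the dotted-path API reads and
# writes nested keys inside the JSON `data` blob; `commit=True` persists immediately.
userconfig = request.user.config               # via the related_name on the OneToOneField
userconfig.set("pagination.per_page", 50, commit=True)
userconfig.get("pagination.per_page")          # 50
userconfig.get("pagination.page", default=1)   # 1 (missing keys fall back to the default)
userconfig.all()                               # e.g. {"pagination.per_page": 50}
userconfig.clear("pagination", commit=True)    # removes the branch and its children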
class Migration(migrations.Migration):

    dependencies = [
        ('hpaction', '0026_auto_20201104_1528'),
    ]

    operations = [
        migrations.AlterField(
            model_name='feewaiverdetails',
            name='asked_before',
            field=models.BooleanField(
                blank=True,
                help_text='Whether the user has requested a fee waiver before.',
                null=True),
        ),
        migrations.AlterField(
            model_name='feewaiverdetails',
            name='receives_public_assistance',
            field=models.BooleanField(
                blank=True,
                help_text='Whether the user receives any kind of public assistance benefits, e.g. cash benefits, rent assistance, food stamps, Medicaid.',
                null=True),
        ),
        migrations.AlterField(
            model_name='harassmentdetails',
            name='more_than_one_family_per_apartment',
            field=models.BooleanField(
                blank=True,
                help_text='Is there more than one family living in each apartment?',
                null=True),
        ),
        migrations.AlterField(
            model_name='harassmentdetails',
            name='two_or_less_apartments_in_building',
            field=models.BooleanField(
                blank=True,
                help_text='Does your building have 2 apartments or fewer?',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='filed_with_311',
            field=models.BooleanField(
                blank=True,
                help_text='Whether the user has filed any complaints with 311 before.',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='hpd_issued_violations',
            field=models.BooleanField(
                blank=True,
                help_text='Whether HPD issued any violations.',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='sue_for_harassment',
            field=models.BooleanField(
                blank=True,
                help_text='Whether the user wants to sue for harassment.',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='sue_for_repairs',
            field=models.BooleanField(
                blank=True,
                help_text='Whether the user wants to sue for repairs.',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='thirty_days_since_311',
            field=models.BooleanField(
                blank=True,
                help_text='Whether 30 days have passed since the user filed complaints with 311.',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='thirty_days_since_violations',
            field=models.BooleanField(
                blank=True,
                help_text='Whether 30 days have passed since HPD issued violations.',
                null=True),
        ),
        migrations.AlterField(
            model_name='hpactiondetails',
            name='urgent_and_dangerous',
            field=models.BooleanField(
                blank=True,
                help_text='Whether the conditions are urgent and dangerous.',
                null=True),
        ),
        migrations.AlterField(
            model_name='servingpapers',
            name='lob_letter_object',
            field=models.JSONField(
                blank=True,
                help_text='If the papers were sent via Lob, this is the JSON response of the API call that was made to send them, documented at https://lob.com/docs/python#letters.',
                null=True),
        ),
    ]
def test_custom_encoder(self):
    field = models.JSONField(encoder=DjangoJSONEncoder)
    value = uuid.UUID('{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}')
    field.clean({'uuid': value}, None)
class Datapoint(ModelWithIterableFields):
    """
    Model for a datapoint.

    A datapoint represents one source of information. Devices or webservices
    will typically emit information on more than one datapoint. E.g. a climate
    sensor in a room might publish temperature and humidity measurements. Both
    will be treated as individual datapoints, as this allows us to abstract
    away the complexity of the devices.

    TODO: Maybe replace delete with deactivate, else we might end up with
          entries in the ValueDB with unknown origin (deleted datapoints will
          be re-entered but with a new id).
    TODO: Add consistency checks on save.
    """
    origin_id = models.TextField(
        null=True,
        unique=True,
        blank=True,
        help_text=(
            "This id is used if the datapoint's data is pushed in from an "
            "external tool (e.g. BEMCom) that maintains its own list of "
            "datapoint metadata. This field allows matching the ids of the "
            "external tools with the database table of the EMP, effectively "
            "allowing the EMP to store datapoints in addition to what is "
            "pushed from the external tool, e.g. for mock (fake) values."))
    short_name = models.SlugField(
        max_length=30,
        null=True,  # required to allow migrating tables without this field.
        default=None,
        unique=True,
        blank=False,  # New datapoints should not have an empty short_name.
        help_text=("A short name to identify the datapoint."))
    TYPE_CHOICES = [
        ("sensor", "Sensor"),
        ("actuator", "Actuator"),
    ]
    type = models.CharField(
        max_length=8,
        default=None,
        null=False,
        choices=TYPE_CHOICES,
        help_text=("Datapoint type, can be either sensor or actuator."))
    # Defines the data format of the datapoint, i.e. which additional metadata
    # we can expect to have reasonable values.
    #
    # The formats have the following meanings:
    #   numeric: The value of the datapoint can be stored as a float.
    #   text: The value of the datapoint can be stored as a string.
    #   generic: No additional information.
    #   continuous: The value is a continuous variable with an optional max
    #               and min value, that can take any value in between.
    #   discrete: The value of the datapoint can take one value out of a
    #             limited set of possible values.
    data_format_choices = [
        ("generic_numeric", "Generic Numeric"),
        ("continuous_numeric", "Continuous Numeric"),
        ("discrete_numeric", "Discrete Numeric"),
        ("generic_text", "Generic Text"),
        ("discrete_text", "Discrete Text"),
    ]
    # Use generic_text as default as it imposes no constraints on the datapoint
    # apart from that the value can be stored as a string, which should always
    # be possible as the value has been received as a JSON string.
    data_format = models.CharField(
        max_length=18,
        choices=data_format_choices,
        default="generic_text",
        help_text=(
            "Format of the datapoint value. Additionally defines which "
            "metadata is available for it. See documentation in code for "
            "details."))
    # Don't limit this; people should never need to use abbreviations or
    # shorten their thoughts just b/c the field is too short.
    description = models.TextField(
        editable=True,
        blank=True,
        help_text=(
            "A human readable description of the datapoint targeted at users "
            "of the API without knowledge about connector details."))
    origin_description = models.TextField(
        editable=True,
        blank=True,
        help_text=(
            "A human readable description of the datapoint targeted at users "
            "of the API without knowledge about connector details. This is "
            "the description provided by the external tool."))
    # ##########################################################################
    #
    # Below all datapoint fields that may or may not be populated for a
    # particular datapoint depending on the data_format and type.
    #
    # ##########################################################################
    last_value = models.TextField(
        null=True,
        blank=True,
        default=None,
        help_text=(
            "The last value received for the datapoint. We store all values, "
            "including numeric ones, as strings as this simplifies the logic "
            "significantly and prevents unintended side effects, e.g. data "
            "loss if the data format field is changed."))
    last_value_timestamp = models.DateTimeField(
        null=True,
        blank=True,
        default=None,
        help_text=("The timestamp of the last value received via MQTT."))
    last_setpoint = models.JSONField(
        null=True,
        blank=True,
        default=None,
        help_text=("The last setpoint received for the datapoint. "
                   "Applicable to actuator datapoints."))
    last_setpoint_timestamp = models.DateTimeField(
        null=True,
        blank=True,
        default=None,
        help_text=("The timestamp of the last setpoint received for the "
                   "datapoint. Applicable to actuator datapoints."))
    last_schedule = models.JSONField(
        null=True,
        blank=True,
        default=None,
        help_text=("The last schedule received for the datapoint. "
                   "Applicable to actuator datapoints."))
    last_schedule_timestamp = models.DateTimeField(
        null=True,
        blank=True,
        default=None,
        help_text=("The timestamp of the last schedule received for the "
                   "datapoint. Applicable to actuator datapoints."))
    allowed_values = models.JSONField(
        null=True,
        blank=True,
        default=None,
        help_text=("Allowed values. Applicable to discrete valued datapoints. "
                   "Must be a valid JSON string."))
    min_value = models.FloatField(
        blank=True,
        null=True,
        default=None,
        help_text=("The minimal expected value of the datapoint. "
                   "Applicable to numeric datapoints."))
    max_value = models.FloatField(
        blank=True,
        null=True,
        default=None,
        help_text=("The maximal expected value of the datapoint. "
                   "Applicable to numeric datapoints."))
    unit = models.TextField(
        editable=True,
        default="",
        blank=True,
        help_text=("The unit in SI notation, e.g. Mg*m*s^-2 aka. kN. "
                   "Applicable to numeric datapoints."))

    def save(self, *args, **kwargs):
        """
        Replace an empty string in origin_id with None (which cannot be entered
        directly in the admin), as every None is unique while an empty string
        violates the unique constraint. However, we want the origin id to be
        unique only if it is set.
        """
        if self.origin_id == "":
            self.origin_id = None
        super().save(*args, **kwargs)

    def __str__(self):
        if self.short_name is not None:
            return (str(self.id) + " - " + self.short_name)
        else:
            return str(self.id)
def test_formfield(self):
    model_field = models.JSONField()
    form_field = model_field.formfield()
    self.assertIsInstance(form_field, forms.JSONField)
class Feed(models.Model): """ A Feed entity """ # Entity control fields added_by = models.ForeignKey(to='auth.User', help_text="User who added this feed", verbose_name="Added by", db_index=True, on_delete=models.CASCADE) broken = models.BooleanField( default=False, help_text="Denotes whether this feed is broken or not", verbose_name="Broken", db_index=True) error = models.TextField(blank=True, null=True, help_text="Error when a problem occurs", verbose_name="Error") feed_url = models.TextField(validators=[URLValidator()], help_text="URL of the RSS feed", verbose_name="Feed URL", unique=True) type = models.CharField(choices=constants.FEED_TYPES, db_index=True, max_length=30, verbose_name="Type", help_text="Type of the feed") etag = models.CharField(blank=True, null=True, max_length=100, verbose_name="E-Tag", help_text="E-Tag header") # Dates created_at = models.DateTimeField( auto_now_add=True, help_text="Date this feed has been added", verbose_name="Created at") last_checked_at = models.DateTimeField( auto_now_add=True, help_text="Last time the feed has been checked", verbose_name="Last checked at") last_updated_at = models.DateTimeField( blank=True, null=True, help_text="Last time the feed has been updated", verbose_name="Last updated at") # Required data fields title = models.TextField(help_text="Title of the feed", verbose_name="Title", db_index=True) # Optional data fields alternate_title = models.TextField( blank=True, null=True, help_text="Alternate title for the feed", verbose_name="Alternate title") site_url = models.TextField(blank=True, null=True, validators=[URLValidator()], help_text="URL of the feed's website", verbose_name="Site URL") extra_data = models.JSONField(verbose_name="Extra data", db_index=True, null=True) # Manager objects = managers.FeedManager() # Methods and meta class Meta: ordering = ( 'title', 'last_updated_at', ) def __str__(self): return self.alternate_title or self.title def _update_entries(self, entries: List[Dict]) -> datetime: """ Adds or updates the feeds entries given an entry list :param entries: :return: Datetime of the latest entry """ latest = None # Keeps track of the latest entry for entry in entries: # Get entry entity entry = Entry.objects.from_raw_entry(entry) entry.feed = self # Try to match by guid, then url, then title and date if entry.guid: query = {'guid': entry.guid} elif entry.url: query = {'url': entry.url} elif entry.title: query = {'title': entry.title, 'date': entry.date} else: # An Entry must contain at least one of the above raise CrispyException( "Can't find an entry identifier, cannot import") # Update existing entries and delete old try: existing = self.entries.get(**query) except models.ObjectDoesNotExist: # Existing entry not found, create it entry.save() else: # Existing entry found # Updates entry if newer if entry.date and entry.date > existing.date: existing.update(entry) # Update latest tracker if not latest or (entry.date and entry.date > latest): latest = entry.date return latest def _update_feed_data(self, feed_data_obj: feedparser.FeedParserDict) -> None: """ Updates feed data given a "feedparser.FeedParserDict.feed" object :param feed_data_obj: :return: """ self.title = feed_data_obj['title'] site_url = feed_data_obj.get('link', None) if site_url: self.site_url = site_url def update_feed_data(self, parsed_feed: feedparser.FeedParserDict) -> None: # Set feed type if not self.type: self.type = parsed_feed.version # Update feed data self._update_feed_data(parsed_feed.feed) def update_feed_entries(self, entries: 
List[Dict]): # Update entries try: last_updated = self._update_entries(entries) except CrispyException as e: if self.error: self.error += ". \n" self.error += "Entry error: {}".format(e) return False else: # Update last updated self.last_updated_at = last_updated
def test_formfield_custom_encoder_decoder(self):
    model_field = models.JSONField(encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder)
    form_field = model_field.formfield()
    self.assertIs(form_field.encoder, DjangoJSONEncoder)
    self.assertIs(form_field.decoder, CustomJSONDecoder)
class Migration(migrations.Migration): dependencies = [ ('announcements', '0012_auto_20220202_1830'), ] operations = [ migrations.AlterField( model_name='announcement', name='body', field=models.JSONField( default=list, validators=[ core.validators.JSONSchemaValidator( limit_value={ 'description': 'The body, or main content, of an announcement which can contain various HTML elements.', 'items': { 'anyOf': [{ 'additionalProperties': False, 'properties': { 'element': { 'enum': ['hr'], 'title': 'HTML Element', 'type': 'string' } }, 'required': ['element'], 'title': 'Horizontal Rule', 'type': 'object' }, { 'additionalProperties': False, 'properties': { 'content': { 'pattern': '(^(?!\\s*)$)|(^.*\\S.*$)', 'title': 'Paragraph Text', 'type': 'string' }, 'element': { 'enum': ['p'], 'title': 'HTML Element', 'type': 'string' } }, 'required': ['element', 'content'], 'title': 'Paragraph', 'type': 'object' }, { 'additionalProperties': False, 'properties': { 'alt': { 'pattern': '(^(?!\\s*)$)|(^.*\\S.*$)', 'title': 'Alternate Text', 'type': 'string' }, 'element': { 'enum': ['img'], 'title': 'HTML Element', 'type': 'string' }, 'url': { 'format': 'uri', 'title': 'Image URL', 'type': 'string' } }, 'required': ['element', 'alt', 'url'], 'title': 'Image', 'type': 'object' }, { 'additionalProperties': False, 'properties': { 'content': { 'title': 'Header Text', 'type': 'string' }, 'element': { 'enum': [ 'h1', 'h2', 'h3', 'h4', 'h5', 'h6' ], 'title': 'HTML Element', 'type': 'string' } }, 'required': ['element', 'content'], 'title': 'Header', 'type': 'object' }, { 'additionalProperties': False, 'properties': { 'content': { 'title': 'Hyperlink Text', 'type': 'string' }, 'element': { 'enum': ['a'], 'title': 'HTML Element', 'type': 'string' }, 'href': { 'format': 'uri', 'title': 'Hypertext Reference', 'type': 'string' } }, 'required': ['element', 'href', 'content'], 'title': 'Anchor', 'type': 'object' }], 'title': 'Element' }, 'schema': 'http://json-schema.org/draft-07/schema#', 'title': 'Announcement Body', 'type': 'array' }) ], verbose_name='Announcement Body'), ), ]
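# Hedged example (not part of the original source): a `body` value that conforms to
# the JSON schema enforced by the validator above -- an array of element objects.
body = [
    {"element": "h2", "content": "Maintenance window"},
    {"element": "p", "content": "The service will be unavailable on Saturday."},
    {"element": "hr"},
    {"element": "img", "alt": "Status page screenshot", "url": "https://example.com/status.png"},
    {"element": "a", "href": "https://example.com/status", "content": "Live status"},
]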
def test_deconstruct(self):
    field = models.JSONField()
    name, path, args, kwargs = field.deconstruct()
    self.assertEqual(path, 'django.db.models.JSONField')
    self.assertEqual(args, [])
    self.assertEqual(kwargs, {})
class ListInputDefinition(InputDefinition): element_type = models.CharField( max_length=3, choices=ListElementTypes.choices() ) min_length = models.PositiveIntegerField(blank=True, null=True) max_length = models.PositiveIntegerField(blank=True, null=True) default = models.JSONField(blank=True, null=True) as_tuple = models.BooleanField(default=False) input_class = ListInput def raise_not_list_error(self) -> None: raise ValidationError( _("Default ListInputDefinition value must be a list instance!") ) def raise_wrong_type_error(self) -> None: required_type = ListElementTypes[self.element_type].value.lower() raise ValidationError( _(f"List elements must be of type {required_type}") ) def validate_elements_type_for_default(self) -> bool: required_type = TYPES_DICT[ListElementTypes[self.element_type]] if required_type == "file": return all([Path(element).is_file() for element in self.default]) return all( [type(element) == required_type for element in self.default] ) def validate_default_value_min_length(self) -> bool: return ( len(self.default) >= self.min_length if self.min_length else True ) def raise_min_length_error(self) -> None: raise ValidationError( _(f"Default value must have at least {self.min_length} elements!") ) def validate_default_value_max_length(self) -> bool: return ( len(self.default) <= self.max_length if self.max_length else True ) def raise_max_length_error(self) -> None: raise ValidationError( _(f"Default value must have at most {self.max_length} elements!") ) def validate_default_value(self) -> None: if not isinstance(self.default, list): self.raise_not_list_error() if not self.validate_elements_type_for_default(): self.raise_wrong_type_error() if not self.validate_default_value_min_length(): self.raise_min_length_error() if not self.validate_default_value_max_length(): self.raise_max_length_error() def validate(self): if self.default is not None: self.validate_default_value() super().validate() def get_type(self) -> InputDefinitions: return InputDefinitions.LST @property def expected_type_definition(self) -> ListElementTypes: return ListElementTypes[self.element_type]
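# Hedged usage sketch (not part of the original source): default values are checked
# against the declared element type and the optional length bounds. "INT" is assumed
# here to be a ListElementTypes member name mapping to Python's int in TYPES_DICT.
definition = ListInputDefinition(
    element_type="INT",
    min_length=1,
    max_length=4,
    default=[2, 4, 8],
)
definition.validate_default_value()   # passes silently
definition.default = [2, "four", 8]
definition.validate_default_value()   # raises ValidationError: wrong element type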
def test_deconstruct_custom_encoder_decoder(self):
    field = models.JSONField(encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder)
    name, path, args, kwargs = field.deconstruct()
    self.assertEqual(kwargs['encoder'], DjangoJSONEncoder)
    self.assertEqual(kwargs['decoder'], CustomJSONDecoder)
class Migration(migrations.Migration): dependencies = [ ("archives", "0011_archive_hanging_protocol"), ] operations = [ migrations.AddField( model_name="archive", name="view_content", field=models.JSONField( blank=True, default=dict, validators=[ grandchallenge.core.validators.JSONValidator( schema={ "$schema": "http://json-schema.org/draft-06/schema#", "additionalProperties": False, "definitions": {}, "properties": { "denary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "main": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "nonary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "octonary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "quaternary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "quinary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "secondary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "senary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "septenary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, "tertiary": { "contains": { "type": "string" }, "minItems": 1, "type": "array", "uniqueItems": True, }, }, "title": "The Display Port Mapping Schema", "type": "object", }) ], ), ), ]
def test_invalid_encoder(self):
    msg = 'The encoder parameter must be a callable object.'
    with self.assertRaisesMessage(ValueError, msg):
        models.JSONField(encoder=DjangoJSONEncoder())
class JobResult(BaseModel, CustomFieldModel): """ This model stores the results from running a user-defined report. """ name = models.CharField(max_length=255) obj_type = models.ForeignKey( to=ContentType, related_name="job_results", verbose_name="Object types", limit_choices_to=FeatureQuery("job_results"), help_text="The object type to which this job result applies", on_delete=models.CASCADE, ) created = models.DateTimeField(auto_now_add=True) completed = models.DateTimeField(null=True, blank=True) user = models.ForeignKey( to=settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name="+", blank=True, null=True ) status = models.CharField( max_length=30, choices=JobResultStatusChoices, default=JobResultStatusChoices.STATUS_PENDING, ) data = models.JSONField(encoder=DjangoJSONEncoder, null=True, blank=True) schedule = models.ForeignKey(to="extras.ScheduledJob", on_delete=models.SET_NULL, null=True, blank=True) """ Although "data" is technically an unstructured field, we have a standard structure that we try to adhere to. This structure is created loosely as a superset of the formats used by Scripts and Reports in NetBox 2.10. Log Messages now go to their own object, the JobLogEntry. data = { "output": <optional string, such as captured stdout/stderr>, } """ job_id = models.UUIDField(unique=True) class Meta: ordering = ["-created"] get_latest_by = "created" def __str__(self): return str(self.job_id) @property def duration(self): if not self.completed: return None duration = self.completed - self.created minutes, seconds = divmod(duration.total_seconds(), 60) return f"{int(minutes)} minutes, {seconds:.2f} seconds" @property def related_object(self): """Get the related object, if any, identified by the `obj_type`, `name`, and/or `job_id` fields. If `obj_type` is extras.Job, then the `name` is used to look up an extras.jobs.Job subclass based on the `class_path` of the Job subclass. Note that this is **not** the extras.models.Job model class nor an instance thereof. Else, if the model class referenced by `obj_type` has a `name` field, our `name` field will be used to look up a corresponding model instance. This is used, for example, to look up a related `GitRepository`; more generally it can be used by any model that 1) has a unique `name` field and 2) needs to have a many-to-one relationship between JobResults and model instances. Else, the `obj_type` and `job_id` will be used together as a quasi-GenericForeignKey to look up a model instance whose PK corresponds to the `job_id`. This behavior is currently unused in the Nautobot core, but may be of use to plugin developers wishing to create JobResults that have a one-to-one relationship to plugin model instances.
""" from nautobot.extras.jobs import get_job # needed here to avoid a circular import issue if self.obj_type == ContentType.objects.get(app_label="extras", model="job"): # Related object is an extras.Job subclass, our `name` matches its `class_path` return get_job(self.name) model_class = self.obj_type.model_class() if hasattr(model_class, "name"): # See if we have a many-to-one relationship from JobResult to model_class record, based on `name` try: return model_class.objects.get(name=self.name) except model_class.DoesNotExist: pass # See if we have a one-to-one relationship from JobResult to model_class record based on `job_id` try: return model_class.objects.get(id=self.job_id) except model_class.DoesNotExist: pass return None @property def related_name(self): """ Similar to self.name, but if there's an appropriate `related_object`, use its name instead. """ related_object = self.related_object if not related_object: return self.name if hasattr(related_object, "name"): return related_object.name return str(related_object) def get_absolute_url(self): return reverse("extras:jobresult", kwargs={"pk": self.pk}) def set_status(self, status): """ Helper method to change the status of the job result. If the target status is terminal, the completion time is also set. """ self.status = status if status in JobResultStatusChoices.TERMINAL_STATE_CHOICES: self.completed = timezone.now() @classmethod def enqueue_job(cls, func, name, obj_type, user, celery_kwargs=None, *args, schedule=None, **kwargs): """ Create a JobResult instance and enqueue a job using the given callable func: The callable object to be enqueued for execution name: Name for the JobResult instance obj_type: ContentType to link to the JobResult instance obj_type user: User object to link to the JobResult instance celery_kwargs: Dictionary of kwargs to pass as **kwargs to Celery when job is queued args: additional args passed to the callable schedule: Optional ScheduledJob instance to link to the JobResult kwargs: additional kwargs passed to the callable """ job_result = cls.objects.create(name=name, obj_type=obj_type, user=user, job_id=uuid.uuid4(), schedule=schedule) kwargs["job_result_pk"] = job_result.pk # Prepare kwargs that will be sent to Celery if celery_kwargs is None: celery_kwargs = {} func.apply_async(args=args, kwargs=kwargs, task_id=str(job_result.job_id), **celery_kwargs) return job_result def log( self, message, obj=None, level_choice=LogLevelChoices.LOG_DEFAULT, grouping="main", logger=None, use_default_db=False, ): """ General-purpose API for storing log messages in a JobResult's 'data' field. 
        message (str): Message to log
        obj (object): Object associated with this message, if any
        level_choice (LogLevelChoices): Message severity level
        grouping (str): Grouping to store the log message under
        logger (logging.Logger): Optional logger to also output the message to
        use_default_db (bool): Use the default database instead of the JOB_LOGS database
        """
        if level_choice not in LogLevelChoices.as_dict():
            raise Exception(f"Unknown logging level: {level_choice}")

        log = JobLogEntry(
            job_result=self,
            log_level=level_choice,
            grouping=grouping[:JOB_LOG_MAX_GROUPING_LENGTH],
            message=str(message),
            created=timezone.now().isoformat(),
            log_object=str(obj)[:JOB_LOG_MAX_LOG_OBJECT_LENGTH] if obj else None,
            absolute_url=obj.get_absolute_url()[:JOB_LOG_MAX_ABSOLUTE_URL_LENGTH]
            if hasattr(obj, "get_absolute_url")
            else None,
        )

        # If the override is provided, use the default database (pass no `using` argument).
        # Otherwise use a separate database so that log entries are written immediately instead of
        # inside transaction.atomic(). This lets logs be reported while jobs are still running, and
        # lets the job's database changes be rolled back without losing the log entries.
        if use_default_db or not JOB_LOGS:
            log.save()
        else:
            log.save(using=JOB_LOGS)

        if logger:
            if level_choice == LogLevelChoices.LOG_FAILURE:
                log_level = logging.ERROR
            elif level_choice == LogLevelChoices.LOG_WARNING:
                log_level = logging.WARNING
            else:
                log_level = logging.INFO
            logger.log(log_level, str(message))
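# --- Usage sketch (not part of the model code above) -----------------------------------
# A minimal illustration of how `enqueue_job()` and `log()` might be tied together.
# Assumptions: a registered Celery task `example_task`, a Job subclass at
# "myapp.jobs.ExampleJob", and Nautobot-style import paths; all of these names are
# hypothetical, and only the JobResult API itself comes from the model above.
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType

from nautobot.extras.choices import LogLevelChoices
from nautobot.extras.models import JobResult

from myapp.tasks import example_task  # hypothetical Celery task

user = get_user_model().objects.first()
job_content_type = ContentType.objects.get(app_label="extras", model="job")

# Creates the JobResult row, then hands the work to Celery with task_id == job_id
job_result = JobResult.enqueue_job(
    example_task,
    name="myapp.jobs.ExampleJob",  # for extras.Job results, `name` is the job's class_path
    obj_type=job_content_type,
    user=user,
    celery_kwargs={"queue": "default"},  # forwarded to apply_async()
)

# Inside the running task, each call becomes a JobLogEntry row that is written immediately
# (to the JOB_LOGS database when configured), outside any transaction.atomic() block
job_result.log("Starting work", level_choice=LogLevelChoices.LOG_INFO, grouping="setup")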
    def test_invalid_decoder(self):
        msg = 'The decoder parameter must be a callable object.'
        with self.assertRaisesMessage(ValueError, msg):
            models.JSONField(decoder=CustomJSONDecoder())
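# --- Sketch: the valid counterpart of the test above ------------------------------------
# JSONField rejects a decoder that is not callable; an *instance* of a JSONDecoder
# subclass has no __call__, hence the ValueError asserted in test_invalid_decoder.
# Passing the class itself is the valid form. The CustomJSONDecoder below is an
# illustrative stand-in, not the test suite's fixture.
import json

from django.db import models


class CustomJSONDecoder(json.JSONDecoder):
    """Illustrative decoder subclass; parses floats as strings just to show the hook."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, parse_float=str, **kwargs)


# Valid: the decoder *class* is callable, so the constructor check passes
field = models.JSONField(decoder=CustomJSONDecoder)

# Invalid: an instance is not callable and raises
# ValueError: The decoder parameter must be a callable object.
# models.JSONField(decoder=CustomJSONDecoder())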
class ScheduledJob(BaseModel):
    """Model representing a periodic task."""

    name = models.CharField(
        max_length=200,
        verbose_name="Name",
        help_text="Short Description For This Task",
    )
    task = models.CharField(
        max_length=200,
        verbose_name="Task Name",
        help_text='The name of the Celery task that should be run. (Example: "proj.tasks.import_contacts")',
    )
    job_class = models.CharField(
        max_length=255, verbose_name="Job Class", help_text="Name of the fully qualified Nautobot Job class path"
    )
    interval = models.CharField(choices=JobExecutionType, max_length=255)
    args = models.JSONField(blank=True, default=list, encoder=NautobotKombuJSONEncoder)
    kwargs = models.JSONField(blank=True, default=dict, encoder=NautobotKombuJSONEncoder)
    queue = models.CharField(
        max_length=200,
        blank=True,
        null=True,
        default=None,
        verbose_name="Queue Override",
        help_text="Queue defined in CELERY_TASK_QUEUES. Leave None for default queuing.",
    )
    one_off = models.BooleanField(
        default=False,
        verbose_name="One-off Task",
        help_text="If True, the schedule will only run the task a single time",
    )
    start_time = models.DateTimeField(
        verbose_name="Start Datetime",
        help_text="Datetime when the schedule should begin triggering the task to run",
    )
    enabled = models.BooleanField(
        default=True,
        verbose_name="Enabled",
        help_text="Set to False to disable the schedule",
    )
    last_run_at = models.DateTimeField(
        editable=False,
        blank=True,
        null=True,
        verbose_name="Most Recent Run",
        help_text="Datetime that the schedule last triggered the task to run. "
        "Reset to None if enabled is set to False.",
    )
    total_run_count = models.PositiveIntegerField(
        default=0,
        editable=False,
        verbose_name="Total Run Count",
        help_text="Running count of how many times the schedule has triggered the task",
    )
    date_changed = models.DateTimeField(
        auto_now=True,
        verbose_name="Last Modified",
        help_text="Datetime that this scheduled job was last modified",
    )
    description = models.TextField(
        blank=True,
        verbose_name="Description",
        help_text="Detailed description about the details of this scheduled job",
    )
    user = models.ForeignKey(
        to=settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        related_name="+",
        blank=True,
        null=True,
        help_text="User that requested the schedule",
    )
    approved_by_user = models.ForeignKey(
        to=settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        related_name="+",
        blank=True,
        null=True,
        help_text="User that approved the schedule",
    )
    approval_required = models.BooleanField(default=False)
    approved_at = models.DateTimeField(
        editable=False,
        blank=True,
        null=True,
        verbose_name="Approval date/time",
        help_text="Datetime that the schedule was approved",
    )

    objects = ScheduledJobExtendedQuerySet.as_manager()
    no_changes = False

    def __str__(self):
        return f"{self.name}: {self.interval}"

    def get_absolute_url(self):
        return reverse("extras:scheduledjob", kwargs={"pk": self.pk})

    def save(self, *args, **kwargs):
        self.queue = self.queue or None
        if not self.enabled:
            self.last_run_at = None
        elif not self.last_run_at:
            # I'm not sure if this is a bug, or "works as designed", but if self.last_run_at is not set,
            # the celery beat scheduler will never pick up a recurring job. One-off jobs work just fine though.
            if self.interval in [
                JobExecutionType.TYPE_HOURLY,
                JobExecutionType.TYPE_DAILY,
                JobExecutionType.TYPE_WEEKLY,
            ]:
                # A weekly interval is expressed as 7 days; all other intervals use a multiplier of 1
                multiplier = 7 if self.interval == JobExecutionType.TYPE_WEEKLY else 1
                # Set the "last run at" time to one interval before the scheduled start time
                self.last_run_at = self.start_time - timedelta(
                    **{JobExecutionType.CELERY_INTERVAL_MAP[self.interval]: multiplier},
                )

        super().save(*args, **kwargs)

    def clean(self):
        """
        Model validation
        """
        if self.user and self.approved_by_user and self.user == self.approved_by_user:
            raise ValidationError("The requesting and approving users cannot be the same")

        # bitwise XOR also works on booleans, but not on complex values
        if bool(self.approved_by_user) ^ bool(self.approved_at):
            raise ValidationError("Approval by user and approval time must either both be set or both be undefined")

    @property
    def schedule(self):
        if self.interval == JobExecutionType.TYPE_FUTURE:
            # This is a one-time clocked task
            return clocked(clocked_time=self.start_time)

        return self.to_cron()

    @staticmethod
    def earliest_possible_time():
        return timezone.now() + timedelta(seconds=15)

    def to_cron(self):
        t = self.start_time
        if self.interval == JobExecutionType.TYPE_HOURLY:
            return schedules.crontab(minute=t.minute)
        elif self.interval == JobExecutionType.TYPE_DAILY:
            return schedules.crontab(minute=t.minute, hour=t.hour)
        elif self.interval == JobExecutionType.TYPE_WEEKLY:
            return schedules.crontab(minute=t.minute, hour=t.hour, day_of_week=t.weekday())
        raise ValueError(f"I do not know how to convert {self.interval} to a cron job!")
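# --- Usage sketch (illustrative values) --------------------------------------------------
# How a weekly ScheduledJob maps onto a Celery schedule: `to_cron()` builds a crontab from
# start_time's minute/hour/weekday, and save() backfills last_run_at to one interval
# (7 days for a weekly schedule) before start_time so that celery beat treats the job as
# due at the first start_time. The task name and job class path below are hypothetical,
# and the import paths follow Nautobot's layout as an assumption.
from datetime import datetime, timezone as dt_timezone

from nautobot.extras.choices import JobExecutionType
from nautobot.extras.models import ScheduledJob

scheduled = ScheduledJob(
    name="Weekly device sync",
    task="myapp.tasks.run_weekly_sync",  # hypothetical Celery task name
    job_class="myapp.jobs.DeviceSync",  # hypothetical Job class path
    interval=JobExecutionType.TYPE_WEEKLY,
    start_time=datetime(2021, 6, 7, 3, 30, tzinfo=dt_timezone.utc),
    args=[],
    kwargs={},
)

print(scheduled.schedule)  # -> crontab(minute=30, hour=3, day_of_week=0) for this start_time

# Saving an enabled schedule with no last_run_at backfills it one interval before start_time
# scheduled.save()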
class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Crew',
            fields=[
                ('name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('agency', models.CharField(max_length=100)),
                ('image', models.CharField(max_length=100)),
                ('wikipedia', models.CharField(max_length=100)),
                ('status', models.CharField(max_length=100)),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
            ],
            options={
                'verbose_name': 'Crew',
                'verbose_name_plural': 'Crew',
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('name', models.CharField(max_length=100)),
                ('source', models.URLField(primary_key=True, serialize=False)),
                ('type', models.CharField(choices=[('patch', 'Patch'), ('image', 'Image'), ('portrait', 'Portrait')], max_length=10)),
                ('etag', models.CharField(max_length=100)),
                ('image', models.ImageField(height_field='height', upload_to=spacex.models.image.image_directory_path, width_field='width')),
                ('height', models.IntegerField(null=True)),
                ('width', models.IntegerField(null=True)),
            ],
            options={
                'verbose_name': 'Image',
                'verbose_name_plural': 'Images',
            },
        ),
        migrations.CreateModel(
            name='Launch',
            fields=[
                ('significant', models.BooleanField(default=False)),
                ('fairings', models.JSONField(blank=True, null=True)),
                ('links', models.JSONField()),
                ('static_fire_date_utc', models.DateTimeField(blank=True, null=True)),
                ('static_fire_date_unix', models.IntegerField(blank=True, null=True)),
                ('tbd', models.BooleanField()),
                ('net', models.BooleanField()),
                ('window', models.IntegerField(blank=True, null=True)),
                ('success', models.BooleanField(blank=True, null=True)),
                ('details', models.TextField(blank=True, null=True)),
                ('auto_update', models.BooleanField()),
                ('launch_library_id', models.CharField(blank=True, max_length=100, null=True)),
                ('flight_number', models.IntegerField()),
                ('name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('date_utc', models.DateTimeField(blank=True, null=True)),
                ('date_unix', models.IntegerField(blank=True, null=True)),
                ('date_local', models.DateTimeField(blank=True, null=True)),
                ('date_precision', models.CharField(blank=True, max_length=100, null=True)),
                ('upcoming', models.BooleanField()),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('crew', models.ManyToManyField(to='spacex.Crew')),
                ('images', models.ManyToManyField(to='spacex.Image')),
            ],
            options={
                'verbose_name': 'launch',
                'verbose_name_plural': 'launches',
                'ordering': ['flight_number'],
            },
        ),
        migrations.CreateModel(
            name='Launchpad',
            fields=[
                ('name', models.CharField(max_length=100)),
                ('full_name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('locality', models.CharField(max_length=100)),
                ('region', models.CharField(max_length=100)),
                ('timezone', models.CharField(max_length=100)),
                ('latitude', models.FloatField()),
                ('longitude', models.FloatField()),
                ('launch_attempts', models.IntegerField()),
                ('launch_successes', models.IntegerField()),
                ('details', models.TextField()),
                ('status', models.CharField(max_length=100)),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('images', models.ManyToManyField(to='spacex.Image')),
            ],
            options={
                'verbose_name': 'Launchpad',
                'verbose_name_plural': 'Launchpads',
            },
        ),
        migrations.CreateModel(
            name='Rocket',
            fields=[
                ('height', models.JSONField()),
                ('diameter', models.JSONField()),
                ('mass', models.JSONField()),
                ('first_stage', models.JSONField()),
                ('second_stage', models.JSONField()),
                ('engines', models.JSONField()),
                ('landing_legs', models.JSONField()),
                ('payload_weights', models.JSONField()),
                ('flickr_images', models.JSONField()),
                ('name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('type', models.CharField(max_length=100)),
                ('active', models.BooleanField()),
                ('stages', models.IntegerField()),
                ('boosters', models.IntegerField()),
                ('cost_per_launch', models.IntegerField()),
                ('success_rate_pct', models.IntegerField()),
                ('first_flight', models.DateField()),
                ('country', models.CharField(max_length=100)),
                ('company', models.CharField(max_length=100)),
                ('wikipedia', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('images', models.ManyToManyField(to='spacex.Image')),
            ],
        ),
        migrations.CreateModel(
            name='Payload',
            fields=[
                ('dragon', models.JSONField()),
                ('name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('type', models.CharField(max_length=100)),
                ('reused', models.BooleanField()),
                ('customers', models.JSONField()),
                ('norad_ids', models.JSONField(blank=True, null=True)),
                ('nationalities', models.JSONField()),
                ('manufacturers', models.JSONField()),
                ('mass_kg', models.FloatField(blank=True, null=True)),
                ('mass_lbs', models.FloatField(blank=True, null=True)),
                ('orbit', models.CharField(blank=True, max_length=100, null=True)),
                ('reference_system', models.CharField(blank=True, max_length=100, null=True)),
                ('regime', models.CharField(blank=True, max_length=100, null=True)),
                ('lifespan_years', models.IntegerField(blank=True, null=True)),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('launch', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='spacex.launch')),
            ],
            options={
                'verbose_name': 'Payload',
                'verbose_name_plural': 'Payloads',
            },
        ),
        migrations.CreateModel(
            name='LaunchpadImage',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('image_location', models.CharField(max_length=200)),
                ('launchpad', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='spacex.launchpad')),
            ],
            options={
                'verbose_name': 'Launchpad Image',
                'verbose_name_plural': 'Launchpad Images',
            },
        ),
        migrations.AddField(
            model_name='launchpad',
            name='rockets',
            field=models.ManyToManyField(to='spacex.Rocket'),
        ),
        migrations.AddField(
            model_name='launch',
            name='launchpad',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='spacex.launchpad'),
        ),
        migrations.AddField(
            model_name='launch',
            name='rocket',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='spacex.rocket'),
        ),
        migrations.CreateModel(
            name='Landpad',
            fields=[
                ('name', models.CharField(max_length=100)),
                ('full_name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('type', models.CharField(max_length=100)),
                ('locality', models.CharField(max_length=100)),
                ('latitude', models.FloatField()),
                ('longitude', models.FloatField()),
                ('landing_attempts', models.IntegerField()),
                ('landing_successes', models.IntegerField()),
                ('wikipedia', models.CharField(max_length=100)),
                ('details', models.TextField()),
                ('status', models.CharField(max_length=100)),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('images', models.ManyToManyField(to='spacex.Image')),
            ],
            options={
                'verbose_name': 'Landpad',
                'verbose_name_plural': 'Landpads',
            },
        ),
        migrations.CreateModel(
            name='Dragon',
            fields=[
                ('heat_shield', models.JSONField()),
                ('launch_payload_mass', models.JSONField()),
                ('launch_payload_vol', models.JSONField()),
                ('return_payload_mass', models.JSONField()),
                ('return_payload_vol', models.JSONField()),
                ('pressurized_capsule', models.JSONField()),
                ('trunk', models.JSONField()),
                ('height_w_trunk', models.JSONField()),
                ('diameter', models.JSONField()),
                ('first_flight', models.DateField()),
                ('flickr_images', models.JSONField()),
                ('name', models.CharField(max_length=100)),
                ('sanitized_name', models.CharField(max_length=100)),
                ('active', models.BooleanField()),
                ('crew_capacity', models.IntegerField()),
                ('sidewall_angle_deg', models.IntegerField()),
                ('orbit_duration_yr', models.IntegerField()),
                ('dry_mass_kg', models.IntegerField()),
                ('dry_mass_lb', models.IntegerField()),
                ('thrusters', models.JSONField()),
                ('wikipedia', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('images', models.ManyToManyField(to='spacex.Image')),
            ],
            options={
                'verbose_name': 'Dragon',
                'verbose_name_plural': 'Dragons',
            },
        ),
        migrations.AddField(
            model_name='crew',
            name='images',
            field=models.ManyToManyField(to='spacex.Image'),
        ),
        migrations.CreateModel(
            name='Core',
            fields=[
                ('block', models.IntegerField(blank=True, null=True)),
                ('reuse_count', models.IntegerField(blank=True, null=True)),
                ('rtls_attempts', models.IntegerField(blank=True, null=True)),
                ('rtls_landings', models.IntegerField(blank=True, null=True)),
                ('asds_attempts', models.IntegerField(blank=True, null=True)),
                ('asds_landings', models.IntegerField(blank=True, null=True)),
                ('last_update', models.TextField(blank=True, null=True)),
                ('serial', models.CharField(max_length=5)),
                ('status', models.CharField(max_length=100)),
                ('id', models.CharField(max_length=24, primary_key=True, serialize=False)),
                ('launches', models.ManyToManyField(to='spacex.Launch')),
            ],
            options={
                'verbose_name': 'Core',
                'verbose_name_plural': 'Cores',
            },
        ),
    ]
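# --- Query sketch against the schema created by this migration ---------------------------
# The relationships above (Launch -> Rocket/Launchpad foreign keys, many-to-many links to
# Crew and Image) can be traversed like this. The import path and the data values are
# illustrative assumptions; only the field names come from the migration.
from spacex.models import Launch

launches = (
    Launch.objects.select_related("rocket", "launchpad")  # follow the FKs in one query
    .prefetch_related("crew", "images")  # batch the many-to-many lookups
    .filter(success=True)  # default ordering is ['flight_number'] per the model options
)

for launch in launches:
    pad = launch.launchpad.full_name if launch.launchpad else "unknown pad"
    crew = ", ".join(member.name for member in launch.crew.all())
    print(f"#{launch.flight_number} {launch.name} from {pad} (crew: {crew})")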