class RackRole(OrganizationalModel):
    """A functional role by which Racks may be organized, similar to Devices."""

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    color = ColorField(default=ColorChoices.COLOR_GREY)
    description = models.CharField(max_length=200, blank=True)

    # Column order for CSV export; must match the tuple returned by to_csv().
    csv_headers = ["name", "slug", "color", "description"]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # NOTE: this detail URL is keyed by primary key rather than slug.
        return reverse("dcim:rackrole", args=[self.pk])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.color, self.description)
class Role(OrganizationalModel):
    """
    The functional role of a Prefix or VLAN; for example, "Customer,"
    "Infrastructure," or "Management."
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    # Lower weight sorts first; 1000 leaves room on either side for custom roles.
    weight = models.PositiveSmallIntegerField(default=1000)
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "weight", "description"]

    class Meta:
        ordering = ["weight", "name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("ipam:role", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.weight, self.description)
class Provider(PrimaryModel):
    """
    A telecommunications company (or similar organization) to which each Circuit
    belongs. Stores information pertinent to the user's relationship with the
    Provider.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    asn = ASNField(
        blank=True,
        null=True,
        verbose_name="ASN",
        help_text="32-bit autonomous system number",
    )
    account = models.CharField(max_length=30, blank=True, verbose_name="Account number")
    portal_url = models.URLField(blank=True, verbose_name="Portal URL")
    noc_contact = models.TextField(blank=True, verbose_name="NOC contact")
    admin_contact = models.TextField(blank=True, verbose_name="Admin contact")
    comments = models.TextField(blank=True)

    csv_headers = [
        "name",
        "slug",
        "asn",
        "account",
        "portal_url",
        "noc_contact",
        "admin_contact",
        "comments",
    ]
    # Fields copied when cloning an existing Provider in the UI.
    clone_fields = [
        "asn",
        "account",
        "portal_url",
        "noc_contact",
        "admin_contact",
    ]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("circuits:provider", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            self.asn,
            self.account,
            self.portal_url,
            self.noc_contact,
            self.admin_contact,
            self.comments,
        )
class CircuitType(OrganizationalModel):
    """
    A functional classification for Circuits. For example, a user might define
    CircuitTypes named "Long Haul," "Metro," or "Out-of-Band".
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "description"]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("circuits:circuittype", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.description)
class RIR(OrganizationalModel):
    """
    A Regional Internet Registry (RIR) responsible for allocating a large portion
    of the global IP address space: an organization such as ARIN or RIPE, or a
    governing standard such as RFC 1918.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    is_private = models.BooleanField(
        default=False,
        verbose_name="Private",
        help_text="IP space managed by this RIR is considered private",
    )
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "is_private", "description"]

    class Meta:
        ordering = ["name"]
        verbose_name = "RIR"
        verbose_name_plural = "RIRs"

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("ipam:rir", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.is_private, self.description)
class SecretsGroup(OrganizationalModel):
    """A named collection of related Secrets."""

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name", unique=True)
    description = models.CharField(max_length=200, blank=True)
    # The through model (SecretsGroupAssociation) records each secret's
    # access_type and secret_type within the group.
    secrets = models.ManyToManyField(
        to=Secret,
        related_name="groups",
        through="extras.SecretsGroupAssociation",
        blank=True,
    )

    csv_headers = ["name", "slug", "description"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("extras:secretsgroup", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.description)

    def get_secret_value(self, access_type, secret_type, obj=None, **kwargs):
        """Helper method to retrieve a specific secret from this group.

        May raise SecretError and/or Django ObjectDoesNotExist exceptions; it's up
        to the caller to handle those.
        """
        association = self.secrets.through.objects.get(
            group=self,
            access_type=access_type,
            secret_type=secret_type,
        )
        return association.secret.get_value(obj=obj, **kwargs)
class ClusterGroup(OrganizationalModel):
    """An organizational group of Clusters."""

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "description"]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("virtualization:clustergroup", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.description)
class Platform(OrganizationalModel):
    """
    The software or firmware running on a Device; for example, "Cisco IOS-XR" or
    "Juniper Junos". Nautobot uses Platforms to determine how to interact with
    devices when pulling inventory data or other information by specifying a
    NAPALM driver.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    manufacturer = models.ForeignKey(
        to="dcim.Manufacturer",
        on_delete=models.PROTECT,
        related_name="platforms",
        blank=True,
        null=True,
        help_text="Optionally limit this platform to devices of a certain manufacturer",
    )
    napalm_driver = models.CharField(
        max_length=50,
        blank=True,
        verbose_name="NAPALM driver",
        help_text="The name of the NAPALM driver to use when interacting with devices",
    )
    napalm_args = models.JSONField(
        encoder=DjangoJSONEncoder,
        blank=True,
        null=True,
        verbose_name="NAPALM arguments",
        help_text="Additional arguments to pass when initiating the NAPALM driver (JSON format)",
    )
    description = models.CharField(max_length=200, blank=True)

    csv_headers = [
        "name",
        "slug",
        "manufacturer",
        "napalm_driver",
        "napalm_args",
        "description",
    ]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("dcim:platform", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            # Flatten the FK to its display name (or None when unset).
            self.manufacturer.name if self.manufacturer else None,
            self.napalm_driver,
            self.napalm_args,
            self.description,
        )
class ConfigContextSchema(OrganizationalModel):
    """
    Stores JSON Schema documents that are used to optionally validate config
    context data payloads.
    """

    name = models.CharField(max_length=200, unique=True)
    description = models.CharField(max_length=200, blank=True)
    # TODO: slug is intentionally non-unique (unique=None) for now.
    slug = AutoSlugField(populate_from="name", max_length=200, unique=None, db_index=True)
    data_schema = models.JSONField(
        help_text="A JSON Schema document which is used to validate a config context object."
    )

    # A ConfigContextSchema *may* be owned by another model, such as a GitRepository, or it may be un-owned
    owner_content_type = models.ForeignKey(
        to=ContentType,
        on_delete=models.CASCADE,
        limit_choices_to=FeatureQuery("config_context_owners"),
        default=None,
        null=True,
        blank=True,
    )
    owner_object_id = models.UUIDField(default=None, null=True, blank=True)
    owner = GenericForeignKey(
        ct_field="owner_content_type",
        fk_field="owner_object_id",
    )

    def __str__(self):
        # Prefix with the owner (e.g. a Git repository) when one is set.
        if self.owner:
            return f"[{self.owner}] {self.name}"
        return self.name

    def get_absolute_url(self):
        return reverse("extras:configcontextschema", args=[self.slug])

    def clean(self):
        """
        Validate the schema
        """
        super().clean()

        # First make sure the document is itself a valid JSON Schema (draft 7).
        try:
            Draft7Validator.check_schema(self.data_schema)
        except SchemaError as err:
            raise ValidationError({"data_schema": err.message})

        # Then enforce Nautobot's additional requirement: an object schema with properties.
        is_object_schema = (
            isinstance(self.data_schema, dict)
            and "properties" in self.data_schema
            and self.data_schema.get("type") == "object"
        )
        if not is_object_schema:
            raise ValidationError({
                "data_schema":
                "Nautobot only supports context data in the form of an object and thus the "
                "JSON schema must be of type object and specify a set of properties."
            })
class VLANGroup(OrganizationalModel):
    """
    An arbitrary collection of VLANs within which VLAN IDs and names must be
    unique.
    """

    name = models.CharField(max_length=100, db_index=True)
    # TODO: Remove unique=None to make slug globally unique. This would be a breaking change.
    slug = AutoSlugField(populate_from="name", unique=None, db_index=True)
    site = models.ForeignKey(
        to="dcim.Site",
        on_delete=models.PROTECT,
        related_name="vlan_groups",
        blank=True,
        null=True,
    )
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "site", "description"]

    class Meta:
        ordering = (
            "site",
            "name",
        )  # (site, name) may be non-unique
        unique_together = [
            ["site", "name"],
            # TODO: Remove unique_together to make slug globally unique. This would be a breaking change.
            ["site", "slug"],
        ]
        verbose_name = "VLAN group"
        verbose_name_plural = "VLAN groups"

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("ipam:vlangroup", args=[self.pk])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            self.site.name if self.site else None,
            self.description,
        )

    def get_next_available_vid(self):
        """
        Return the first available VLAN ID (1-4094) in the group, or None if the
        group is fully populated.
        """
        # Materialize the in-use VIDs into a set once, so each of the up-to-4094
        # membership tests below is O(1) rather than a linear scan of the list.
        vlan_ids = set(VLAN.objects.filter(group=self).values_list("vid", flat=True))
        for i in range(1, 4095):
            if i not in vlan_ids:
                return i
        return None
class ComputedField(BaseModel, ChangeLoggedModel):
    """
    A read-only rendered field, driven by a Jinja2 template, that is applied to
    objects within a ContentType.
    """

    content_type = models.ForeignKey(
        to=ContentType,
        on_delete=models.CASCADE,
        limit_choices_to=FeatureQuery("custom_fields"),
    )
    slug = AutoSlugField(populate_from="label", help_text="Internal field name")
    label = models.CharField(max_length=100, help_text="Name of the field as displayed to users")
    description = models.CharField(max_length=200, blank=True)
    template = models.TextField(max_length=500, help_text="Jinja2 template code for field value")
    fallback_value = models.CharField(
        max_length=500,
        blank=True,
        help_text="Fallback value (if any) to be output for the field in the case of a template rendering error.",
    )
    weight = models.PositiveSmallIntegerField(default=100)

    objects = ComputedFieldManager()

    # Fields copied when cloning an existing ComputedField in the UI.
    clone_fields = ["content_type", "description", "template", "fallback_value", "weight"]

    class Meta:
        ordering = ["weight", "slug"]
        unique_together = ("content_type", "label")

    def __str__(self):
        return self.label

    def get_absolute_url(self):
        return reverse("extras:computedfield", args=[self.slug])

    def render(self, context):
        """Render the Jinja2 template against ``context``, falling back on error."""
        try:
            output = render_jinja2(self.template, context)
            # If there is an undefined variable within a template, it returns nothing
            # Doesn't raise an exception either most likely due to using Undefined rather
            # than StrictUndefined, but return fallback_value if None is returned
            if output is None:
                logger.warning("Failed to render computed field %s", self.slug)
                return self.fallback_value
            return output
        except Exception as exc:
            logger.warning("Failed to render computed field %s: %s", self.slug, exc)
            return self.fallback_value
class Region(MPTTModel, OrganizationalModel):
    """A geographic Region within which Sites can be grouped, arranged as a tree."""

    parent = TreeForeignKey(
        to="self",
        on_delete=models.CASCADE,
        related_name="children",
        blank=True,
        null=True,
        db_index=True,
    )
    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    description = models.CharField(max_length=200, blank=True)

    objects = TreeManager()

    csv_headers = ["name", "slug", "parent", "description"]

    class MPTTMeta:
        order_insertion_by = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # NOTE: this detail URL is keyed by primary key rather than slug.
        return reverse("dcim:region", args=[self.pk])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            self.parent.name if self.parent else None,
            self.description,
        )

    def get_site_count(self):
        """Count Sites assigned to this Region or any of its descendants."""
        return Site.objects.filter(Q(region=self) | Q(region__in=self.get_descendants())).count()

    def to_objectchange(self, action):
        # Remove MPTT-internal fields
        return ObjectChange(
            changed_object=self,
            object_repr=str(self),
            action=action,
            object_data=serialize_object(self, exclude=["level", "lft", "rght", "tree_id"]),
        )
class TenantGroup(MPTTModel, OrganizationalModel):
    """An arbitrary, hierarchical collection of Tenants."""

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    parent = TreeForeignKey(
        to="self",
        on_delete=models.CASCADE,
        related_name="children",
        blank=True,
        null=True,
        db_index=True,
    )
    description = models.CharField(max_length=200, blank=True)

    objects = TreeManager()

    csv_headers = ["name", "slug", "parent", "description"]

    class Meta:
        ordering = ["name"]

    class MPTTMeta:
        order_insertion_by = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("tenancy:tenantgroup", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            # NOTE: exports an empty string (not None) when there is no parent.
            self.parent.name if self.parent else "",
            self.description,
        )

    def to_objectchange(self, action):
        # Remove MPTT-internal fields
        return ObjectChange(
            changed_object=self,
            object_repr=str(self),
            action=action,
            object_data=serialize_object(self, exclude=["level", "lft", "rght", "tree_id"]),
        )
class GraphQLQuery(BaseModel, ChangeLoggedModel):
    """A saved GraphQL query; declared variables' defaults are captured on save."""

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    query = models.TextField()
    variables = models.JSONField(encoder=DjangoJSONEncoder, default=dict, blank=True)

    class Meta:
        ordering = ("slug", )
        verbose_name = "GraphQL query"
        verbose_name_plural = "GraphQL queries"

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("extras:graphqlquery", kwargs={"slug": self.slug})

    def save(self, *args, **kwargs):
        collected = {}
        schema = graphene_settings.SCHEMA
        backend = get_default_backend()
        # Load query into GraphQL backend
        document = backend.document_from_string(schema, self.query)

        # Inspect the parsed document tree (document.document_ast) to retrieve the query
        # (operation) definition(s) that define one or more variables. For each operation and
        # variable definition, store the variable's default value (if any) into our own
        # "variables" dict.
        for definition in document.document_ast.definitions:
            if not isinstance(definition, OperationDefinition):
                continue
            if not definition.variable_definitions:
                continue
            for variable_definition in definition.variable_definitions:
                if variable_definition.default_value:
                    default = variable_definition.default_value.value
                else:
                    default = ""
                collected[variable_definition.variable.name.value] = default

        self.variables = collected
        return super().save(*args, **kwargs)

    def clean(self):
        """Reject queries that do not parse against the configured schema."""
        super().clean()
        schema = graphene_settings.SCHEMA
        backend = get_default_backend()
        try:
            backend.document_from_string(schema, self.query)
        except GraphQLSyntaxError as error:
            raise ValidationError({"query": error})
class Status(BaseModel, ChangeLoggedModel, CustomFieldModel, RelationshipModel):
    """Model for database-backend enum choice objects."""

    content_types = models.ManyToManyField(
        to=ContentType,
        related_name="statuses",
        verbose_name="Content type(s)",
        limit_choices_to=FeatureQuery("statuses"),
        help_text="The content type(s) to which this status applies.",
    )
    name = models.CharField(max_length=50, unique=True)
    color = ColorField(default=ColorChoices.COLOR_GREY)
    slug = AutoSlugField(populate_from="name", max_length=50)
    description = models.CharField(max_length=200, blank=True)

    objects = StatusQuerySet.as_manager()

    csv_headers = ["name", "slug", "color", "content_types", "description"]
    # Fields copied when cloning an existing Status in the UI.
    clone_fields = ["color", "content_types"]

    class Meta:
        ordering = ["name"]
        verbose_name_plural = "statuses"

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("extras:status", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        labels = ",".join(f"{ct.app_label}.{ct.model}" for ct in self.content_types.all())
        return (
            self.name,
            self.slug,
            self.color,
            f'"{labels}"',  # Wrap labels in double quotes for CSV
            self.description,
        )
class Tenant(PrimaryModel):
    """
    An organization served by the Nautobot owner — typically a customer or an
    internal department.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    group = models.ForeignKey(
        to="tenancy.TenantGroup",
        on_delete=models.SET_NULL,
        related_name="tenants",
        blank=True,
        null=True,
    )
    description = models.CharField(max_length=200, blank=True)
    comments = models.TextField(blank=True)

    csv_headers = ["name", "slug", "group", "description", "comments"]
    # Fields copied when cloning an existing Tenant in the UI.
    clone_fields = [
        "group",
        "description",
    ]

    class Meta:
        ordering = ["group", "name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("tenancy:tenant", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            self.group.name if self.group else None,
            self.description,
            self.comments,
        )
class ProviderNetwork(PrimaryModel):
    """A network operated by a circuit Provider, unique per (provider, name)."""

    name = models.CharField(max_length=100, db_index=True)
    slug = AutoSlugField(populate_from="name")
    provider = models.ForeignKey(
        to="circuits.Provider",
        on_delete=models.PROTECT,
        related_name="provider_networks",
    )
    description = models.CharField(max_length=200, blank=True)
    comments = models.TextField(blank=True)

    csv_headers = [
        "provider",
        "name",
        "slug",
        "description",
        "comments",
    ]

    class Meta:
        ordering = ("provider", "name")
        # NOTE(review): the named constraint and unique_together below enforce the
        # same (provider, name) uniqueness; both are kept to avoid a schema change.
        constraints = (
            models.UniqueConstraint(fields=("provider", "name"), name="circuits_providernetwork_provider_name"),
        )
        unique_together = ("provider", "name")

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("circuits:providernetwork", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.provider.name,
            self.name,
            self.slug,
            self.description,
            self.comments,
        )

    @property
    def display(self):
        """Human-friendly label combining the provider and network names."""
        return f"{self.provider} {self.name}"
class Manufacturer(OrganizationalModel):
    """
    A company which produces hardware devices; for example, Juniper or Dell.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "description"]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("dcim:manufacturer", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (self.name, self.slug, self.description)
class DeviceRole(OrganizationalModel):
    """
    A functional role by which Devices are organized; for example, "Core Switch"
    or "File Server". Each DeviceRole is assigned a color used when displaying
    rack elevations, and the vm_role flag determines whether the role also
    applies to virtual machines.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    color = ColorField(default=ColorChoices.COLOR_GREY)
    vm_role = models.BooleanField(
        default=True,
        verbose_name="VM Role",
        help_text="Virtual machines may be assigned to this role",
    )
    description = models.CharField(max_length=200, blank=True)

    csv_headers = ["name", "slug", "color", "vm_role", "description"]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("dcim:devicerole", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            self.color,
            self.vm_role,
            self.description,
        )
class Job(PrimaryModel):
    """
    Database model representing an installed Job class.
    """

    # Information used to locate the Job source code
    source = models.CharField(
        max_length=JOB_MAX_SOURCE_LENGTH,
        choices=JobSourceChoices,
        editable=False,
        db_index=True,
        help_text="Source of the Python code for this job - local, Git repository, or plugins",
    )
    git_repository = models.ForeignKey(
        to="extras.GitRepository",
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
        db_index=True,
        related_name="jobs",
        help_text="Git repository that provides this job",
    )
    module_name = models.CharField(
        max_length=JOB_MAX_NAME_LENGTH,
        editable=False,
        db_index=True,
        help_text="Dotted name of the Python module providing this job",
    )
    job_class_name = models.CharField(
        max_length=JOB_MAX_NAME_LENGTH,
        editable=False,
        db_index=True,
        help_text="Name of the Python class providing this job",
    )
    slug = AutoSlugField(
        max_length=JOB_MAX_SLUG_LENGTH,
        populate_from=["class_path"],
        slugify_function=slugify_dots_to_dashes,
    )

    # Human-readable information, potentially inherited from the source code
    # See also the docstring of nautobot.extras.jobs.BaseJob.Meta.
    grouping = models.CharField(
        max_length=JOB_MAX_GROUPING_LENGTH,
        help_text="Human-readable grouping that this job belongs to",
        db_index=True,
    )
    name = models.CharField(
        max_length=JOB_MAX_NAME_LENGTH,
        help_text="Human-readable name of this job",
        db_index=True,
    )
    description = models.TextField(blank=True, help_text="Markdown formatting is supported")

    # Control flags
    installed = models.BooleanField(
        default=True,
        db_index=True,
        editable=False,
        help_text="Whether the Python module and class providing this job are presently installed and loadable",
    )
    enabled = models.BooleanField(default=False, help_text="Whether this job can be executed by users")

    # Additional properties, potentially inherited from the source code
    # See also the docstring of nautobot.extras.jobs.BaseJob.Meta.
    approval_required = models.BooleanField(
        default=False,
        help_text="Whether the job requires approval from another user before running",
    )
    commit_default = models.BooleanField(
        default=True,
        help_text="Whether the job defaults to committing changes when run, or defaults to a dry-run",
    )
    hidden = models.BooleanField(
        default=False,
        db_index=True,
        help_text="Whether the job defaults to not being shown in the UI",
    )
    # Job.Meta.field_order is not overridable in this model
    read_only = models.BooleanField(
        default=False,
        help_text="Whether the job is prevented from making lasting changes to the database",
    )
    soft_time_limit = models.FloatField(
        default=0,
        validators=[MinValueValidator(0)],
        help_text="Maximum runtime in seconds before the job will receive a <code>SoftTimeLimitExceeded</code> "
        "exception.<br>Set to 0 to use Nautobot system default",
    )
    time_limit = models.FloatField(
        default=0,
        validators=[MinValueValidator(0)],
        help_text="Maximum runtime in seconds before the job will be forcibly terminated."
        "<br>Set to 0 to use Nautobot system default",
    )

    # Flags to indicate whether the above properties are inherited from the source code or overridden by the database
    grouping_override = models.BooleanField(
        default=False,
        help_text="If set, the configured grouping will remain even if the underlying Job source code changes",
    )
    name_override = models.BooleanField(
        default=False,
        help_text="If set, the configured name will remain even if the underlying Job source code changes",
    )
    description_override = models.BooleanField(
        default=False,
        help_text="If set, the configured description will remain even if the underlying Job source code changes",
    )
    approval_required_override = models.BooleanField(
        default=False,
        help_text="If set, the configured value will remain even if the underlying Job source code changes",
    )
    commit_default_override = models.BooleanField(
        default=False,
        help_text="If set, the configured value will remain even if the underlying Job source code changes",
    )
    hidden_override = models.BooleanField(
        default=False,
        help_text="If set, the configured value will remain even if the underlying Job source code changes",
    )
    read_only_override = models.BooleanField(
        default=False,
        help_text="If set, the configured value will remain even if the underlying Job source code changes",
    )
    soft_time_limit_override = models.BooleanField(
        default=False,
        help_text="If set, the configured value will remain even if the underlying Job source code changes",
    )
    time_limit_override = models.BooleanField(
        default=False,
        help_text="If set, the configured value will remain even if the underlying Job source code changes",
    )

    objects = JobQuerySet.as_manager()

    class Meta:
        managed = True
        ordering = ["grouping", "name"]
        unique_together = [
            ("source", "git_repository", "module_name", "job_class_name"),
            ("grouping", "name"),
        ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Lazily-populated caches; see the job_class and latest_result properties.
        self._job_class = None
        self._latest_result = None

    def __str__(self):
        return self.name

    def validate_unique(self, exclude=None):
        """
        Check for duplicate (source, module_name, job_class_name) in the case where git_repository is None.

        This is needed because NULL != NULL and so the unique_together constraint will not flag this case.
        """
        if self.git_repository is None:
            # Use .exists() rather than evaluating the whole queryset for truthiness.
            if (
                Job.objects.exclude(pk=self.pk)
                .filter(
                    source=self.source,
                    module_name=self.module_name,
                    job_class_name=self.job_class_name,
                )
                .exists()
            ):
                raise ValidationError(
                    {"job_class_name": "A Job already exists with this source, module_name, and job_class_name"}
                )

        super().validate_unique(exclude=exclude)

    @property
    def job_class(self):
        """Get the Job class (source code) associated with this Job model.

        Returns None if the job is not installed or the class cannot be located.
        """
        if not self.installed:
            return None
        if self._job_class is None:
            if self.source == JobSourceChoices.SOURCE_LOCAL:
                for job_info in jobs_in_directory(settings.JOBS_ROOT, module_name=self.module_name):
                    if job_info.job_class_name == self.job_class_name:
                        self._job_class = job_info.job_class
                        break
                else:
                    logger.warning("Module %s job class %s not found!", self.module_name, self.job_class_name)
            elif self.source == JobSourceChoices.SOURCE_GIT:
                from nautobot.extras.datasources.git import ensure_git_repository

                if self.git_repository is None:
                    logger.warning(
                        "Job %s %s has no associated Git repository",
                        self.module_name,
                        self.job_class_name,
                    )
                    return None
                try:
                    # In the case where we have multiple Nautobot instances, or multiple RQ worker instances,
                    # they are not required to share a common filesystem; therefore, we may need to refresh our local
                    # clone of the Git repository to ensure that it is in sync with the latest repository clone
                    # from any instance.
                    ensure_git_repository(
                        self.git_repository,
                        head=self.git_repository.current_head,
                        logger=logger,
                    )
                    path = os.path.join(self.git_repository.filesystem_path, "jobs")
                    for job_info in jobs_in_directory(path, module_name=self.module_name):
                        if job_info.job_class_name == self.job_class_name:
                            self._job_class = job_info.job_class
                            break
                    else:
                        logger.warning(
                            "Module %s job class %s not found in repository %s",
                            self.module_name,
                            self.job_class_name,
                            self.git_repository,
                        )
                except ObjectDoesNotExist:
                    return None
                except Exception as exc:
                    logger.error(
                        f"Error during local clone/refresh of Git repository {self.git_repository}: {exc}"
                    )
                    return None
            elif self.source == JobSourceChoices.SOURCE_PLUGIN:
                # pkgutil.resolve_name is only available in Python 3.9 and later
                self._job_class = import_object(f"{self.module_name}.{self.job_class_name}")

        return self._job_class

    @property
    def class_path(self):
        """Dotted/slashed identifier for the job class, including its source."""
        if self.git_repository is not None:
            return f"{self.source}.{self.git_repository.slug}/{self.module_name}/{self.job_class_name}"
        return f"{self.source}/{self.module_name}/{self.job_class_name}"

    @property
    def latest_result(self):
        """Most recent JobResult for this job, cached on the instance."""
        if self._latest_result is None:
            self._latest_result = self.results.last()
        return self._latest_result

    @property
    def description_first_line(self):
        # description is blank=True; splitlines() of "" is [], so guard against IndexError.
        if self.description:
            return self.description.splitlines()[0]
        return ""

    @property
    def runnable(self):
        return self.enabled and self.installed and self.job_class is not None

    def clean(self):
        """For any non-overridden fields, make sure they get reset to the actual underlying class value if known."""
        if self.job_class is not None:
            for field_name in JOB_OVERRIDABLE_FIELDS:
                if not getattr(self, f"{field_name}_override", False):
                    setattr(self, field_name, getattr(self.job_class, field_name))

        if self.git_repository is not None and self.source != JobSourceChoices.SOURCE_GIT:
            raise ValidationError('A Git repository may only be specified when the source is "git"')

        # Protect against invalid input when auto-creating Job records
        if len(self.source) > JOB_MAX_SOURCE_LENGTH:
            raise ValidationError(f"Source may not exceed {JOB_MAX_SOURCE_LENGTH} characters in length")
        if len(self.module_name) > JOB_MAX_NAME_LENGTH:
            raise ValidationError(f"Module name may not exceed {JOB_MAX_NAME_LENGTH} characters in length")
        if len(self.job_class_name) > JOB_MAX_NAME_LENGTH:
            raise ValidationError(f"Job class name may not exceed {JOB_MAX_NAME_LENGTH} characters in length")
        if len(self.grouping) > JOB_MAX_GROUPING_LENGTH:
            # BUGFIX: this message was previously missing its f-prefix, so the
            # placeholder {JOB_MAX_GROUPING_LENGTH} appeared literally in the error.
            raise ValidationError(f"Grouping may not exceed {JOB_MAX_GROUPING_LENGTH} characters in length")
        if len(self.name) > JOB_MAX_NAME_LENGTH:
            raise ValidationError(f"Name may not exceed {JOB_MAX_NAME_LENGTH} characters in length")
        if len(self.slug) > JOB_MAX_SLUG_LENGTH:
            raise ValidationError(f"Slug may not exceed {JOB_MAX_SLUG_LENGTH} characters in length")

    def get_absolute_url(self):
        return reverse("extras:job_detail", kwargs={"slug": self.slug})
class Secret(PrimaryModel):
    """
    Data model providing access to a "secret" such as a device credential or a
    systems-integration token.

    Note that this model **does not** STORE the actual secret data, rather it
    provides ACCESS to the secret for other Nautobot models and APIs to make use
    of as needed and appropriate.
    """

    name = models.CharField(max_length=100, unique=True)
    slug = AutoSlugField(populate_from="name")
    description = models.CharField(max_length=200, blank=True)
    provider = models.CharField(max_length=100)
    parameters = models.JSONField(encoder=DjangoJSONEncoder, default=dict)

    csv_headers = [
        "name",
        "slug",
        "description",
        "provider",
        "parameters",
    ]
    # Fields copied when cloning an existing Secret in the UI.
    clone_fields = [
        "provider",
    ]

    class Meta:
        ordering = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("extras:secret", args=[self.slug])

    def to_csv(self):
        """Return this record's values as a tuple matching ``csv_headers``."""
        return (
            self.name,
            self.slug,
            self.description,
            self.provider,
            self.parameters,
        )

    def rendered_parameters(self, obj=None):
        """Render self.parameters as a Jinja2 template with the given object as context."""
        try:
            return {key: render_jinja2(value, {"obj": obj}) for key, value in self.parameters.items()}
        except (TemplateSyntaxError, UndefinedError) as exc:
            raise SecretParametersError(
                self, registry["secrets_providers"].get(self.provider), str(exc)
            ) from exc

    def get_value(self, obj=None):
        """Retrieve the secret value that this Secret is a representation of.

        May raise a SecretError on failure.

        Args:
            obj (object): Object (Django model or similar) that may provide additional context for this secret.
        """
        secrets_provider = registry["secrets_providers"].get(self.provider)
        if not secrets_provider:
            raise SecretProviderError(self, None, f'No registered provider "{self.provider}" is available')

        try:
            return secrets_provider.get_value_for_secret(self, obj=obj)
        except SecretError:
            # Already a well-formed secrets error; propagate as-is.
            raise
        except Exception as exc:
            raise SecretError(self, secrets_provider, str(exc)) from exc

    def clean(self):
        secrets_provider = registry["secrets_providers"].get(self.provider)
        if not secrets_provider:
            raise ValidationError({"provider": f'No registered provider "{self.provider}" is available'})

        # Apply any provider-specific validation of the parameters
        form = secrets_provider.ParametersForm(self.parameters)
        form.is_valid()
        form.clean()
class DynamicGroup(OrganizationalModel):
    """A group of objects of a single `content_type`, whose membership is defined by a stored filter dict."""

    name = models.CharField(max_length=100, unique=True, help_text="Dynamic Group name")
    slug = AutoSlugField(max_length=100, unique=True, help_text="Unique slug", populate_from="name")
    description = models.CharField(max_length=200, blank=True)
    content_type = models.ForeignKey(
        to=ContentType,
        on_delete=models.CASCADE,
        verbose_name="Object Type",
        help_text="The type of object for this Dynamic Group.",
    )
    # TODO(jathan): Set editable=False that this field doesn't show up in forms.
    # It's a complex field only modified by the DynamicGroupForm internals at
    # this time. I am not happy with this pattern right now but due to the
    # dynamism of the form by merging in the `FooFilterForm` fields, there's not
    # currently an easy way to move this construction logic to the model.
    filter = models.JSONField(
        encoder=DjangoJSONEncoder,
        editable=False,
        default=dict,
        help_text="A JSON-encoded dictionary of filter parameters for group membership",
    )

    objects = DynamicGroupQuerySet.as_manager()

    clone_fields = ["content_type", "filter"]

    # This is used as a `startswith` check on field names, so these can be
    # explicit fields or just substrings.
    #
    # Currently this means skipping "search", custom fields, and custom relationships.
    #
    # FIXME(jathan): As one example, `DeviceFilterSet.q` filter searches in `comments`. The issue
    # really being that this field renders as a textarea and it's not cute in the UI. Might be able
    # to dynamically change the widget if we decide we do want to support this field.
    #
    # Type: tuple
    exclude_filter_fields = ("q", "cr", "cf", "comments")  # Must be a tuple

    class Meta:
        ordering = ["content_type", "name"]

    def __str__(self):
        return self.name

    @property
    def model(self):
        """
        Access to the underlying Model class for this group's `content_type`.

        This class object is cached on the instance after the first time it is accessed.
        """
        # `_model` doubles as the cache slot; None means "not yet resolved".
        if getattr(self, "_model", None) is None:
            try:
                model = self.content_type.model_class()
            except models.ObjectDoesNotExist:
                # No `content_type` set yet (e.g. brand-new unsaved instance).
                model = None

            # Resolving the model also eagerly maps the filterset/form classes for it.
            if model is not None:
                self._set_object_classes(model)

            self._model = model

        return self._model

    def _set_object_classes(self, model):
        """
        Given the `content_type` for this group, dynamically map object classes to this instance.

        Sets `filterset_class`, `filterform_class`, and `form_class` attributes on success.

        Protocol for return values:

        - True: Model and object classes mapped.
        - False: Model not yet mapped (likely because of no `content_type`)
        """
        # If object classes have already been mapped, return True.
        if getattr(self, "_object_classes_mapped", False):
            return True

        # Try to set the object classes for this model.
        try:
            self.filterset_class = get_filterset_for_model(model)
            self.filterform_class = get_form_for_model(model, form_prefix="Filter")
            self.form_class = get_form_for_model(model)
        # We expect this to happen on new instances or in any case where `model` was not properly
        # available to the caller, so always fail closed.
        except TypeError:
            logger.debug("Failed to map object classes for model %s", model)
            self.filterset_class = None
            self.filterform_class = None
            self.form_class = None
            self._object_classes_mapped = False
        else:
            self._object_classes_mapped = True

        return self._object_classes_mapped

    @cached_property
    def _map_filter_fields(self):
        """Return all FilterForm fields in a dictionary."""
        # Fail gracefully with an empty dict if nothing is working yet.
        if not self._set_object_classes(self.model):
            return {}

        # Get model form and fields
        modelform = self.form_class()
        modelform_fields = modelform.fields

        # Get filter form and fields
        filterform = self.filterform_class()
        filterform_fields = filterform.fields

        # Get filterset and fields
        filterset = self.filterset_class()
        filterset_fields = filterset.filters

        # Get dynamic group filter field mappings (if any)
        dynamic_group_filter_fields = getattr(self.model, "dynamic_group_filter_fields", {})

        # Model form fields that aren't on the filter form
        missing_fields = set(modelform_fields).difference(filterform_fields)

        # Try a few ways to see if a missing field can be added to the filter fields.
        for missing_field in missing_fields:
            # Skip excluded fields
            if missing_field.startswith(self.exclude_filter_fields):
                logger.debug("Skipping excluded form field: %s", missing_field)
                continue

            # In some cases, fields exist in the model form AND by another name in the filter form
            # (e.g. model form: `cluster` -> filterset: `cluster_id`) yet are omitted from the
            # filter form (e.g. filter form has "cluster_id" but not "cluster"). We only want to add
            # them if-and-only-if they aren't already in `filterform_fields`.
            if missing_field in dynamic_group_filter_fields:
                mapped_field = dynamic_group_filter_fields[missing_field]
                if mapped_field in filterform_fields:
                    logger.debug(
                        "Skipping missing form field %s; mapped to %s filter field", missing_field, mapped_field
                    )
                    continue

            # If the missing field isn't even in the filterset, move on.
            try:
                filterset_field = filterset_fields[missing_field]
            except KeyError:
                logger.debug("Skipping %s: doesn't have a filterset field", missing_field)
                continue

            # Get the missing model form field so we can use it to add to the filterform_fields.
            modelform_field = modelform_fields[missing_field]

            # Replace the modelform_field with the correct type for the UI. At this time this is
            # only being done for CharField since in the filterset form this ends up being a
            # `MultiValueCharField` (dynamically generated from `MultiValueCharFilter`) which is
            # not correct for char fields.
            if isinstance(modelform_field, forms.CharField):
                # Get ready to replace the form field w/ correct widget.
                new_modelform_field = filterset_field.field
                new_modelform_field.widget = modelform_field.widget

                # If `required=True` was set on the model field, pop "required" from the widget
                # attributes. Filter fields should never be required!
                # NOTE(review): pop() without a default would KeyError if "required" were absent
                # from attrs — presumably a required model field always sets it; confirm.
                if modelform_field.required:
                    new_modelform_field.widget.attrs.pop("required")

                modelform_field = new_modelform_field

            # Carry over the `to_field_name` to the modelform_field.
            to_field_name = filterset_field.extra.get("to_field_name")
            if to_field_name is not None:
                modelform_field.to_field_name = to_field_name

            logger.debug("Added %s (%s) to filter fields", missing_field, modelform_field.__class__.__name__)
            filterform_fields[missing_field] = modelform_field

        # Reduce down to a final dict of desired fields.
        return_fields = {}
        for field_name, filter_field in filterform_fields.items():
            # Skip excluded fields
            if field_name.startswith(self.exclude_filter_fields):
                logger.debug("Skipping excluded filter field: %s", field_name)
                continue

            return_fields[field_name] = filter_field

        return return_fields

    def get_filter_fields(self):
        """Return a mapping of `{field_name: filter_field}` for this group's `content_type`."""
        # Fail cleanly until the object has been created.
        if self.model is None:
            return {}

        if not self.present_in_database:
            return {}

        return self._map_filter_fields

    def get_queryset(self):
        """
        Return a queryset for the `content_type` model of this group.

        The queryset is generated based on the `filterset_class` for the Model.
        """
        model = self.model

        if model is None:
            raise RuntimeError(f"Could not determine queryset for model '{model}'")

        # An empty filter matches nothing (not everything).
        if not self.filter:
            return model.objects.none()

        filterset = self.filterset_class(self.filter, model.objects.all())
        qs = filterset.qs

        # Make sure that this instance can't be a member of its own group.
        if self.present_in_database and model == self.__class__:
            qs = qs.exclude(pk=self.pk)

        return qs

    @property
    def members(self):
        """Return the member objects for this group."""
        return self.get_queryset()

    @property
    def count(self):
        """Return the number of member objects in this group."""
        return self.get_queryset().count()

    def get_absolute_url(self):
        return reverse("extras:dynamicgroup", kwargs={"slug": self.slug})

    @property
    def members_base_url(self):
        """Return the list URL for this group's `content_type`, or "" if the model cannot be resolved."""
        if self.model is None:
            return ""

        route_name = get_route_for_model(self.model, "list")
        return reverse(route_name)

    def get_group_members_url(self):
        """Get URL to group members."""
        if self.model is None:
            return ""

        url = self.members_base_url
        filter_str = urllib.parse.urlencode(self.filter, doseq=True)

        # NOTE(review): urlencode() always returns a str (possibly empty), never None, so this
        # branch is always taken and an empty filter yields a trailing "?" — confirm intent.
        if filter_str is not None:
            url += f"?{filter_str}"

        return url

    def set_filter(self, form_data):
        """
        Set all desired fields from `form_data` into `filter` dict.

        :param form_data: Dict of filter parameters, generally from a filter form's `cleaned_data`
        """
        # Get the authoritative source of filter fields we want to keep.
        filter_fields = self.get_filter_fields()

        # Populate the filterset from the incoming `form_data`. The filterset's internal form is
        # used for validation, will be used by us to extract cleaned data for final processing.
        # NOTE(review): setting `form_prefix` mutates the (shared) filterset *class*, not the
        # instance — confirm this is intended to be sticky across instances.
        filterset_class = self.filterset_class
        filterset_class.form_prefix = "filter"
        filterset = filterset_class(form_data)

        # Use the auto-generated filterset form perform creation of the filter dictionary.
        filterset_form = filterset.form

        # It's expected that the incoming data has already been cleaned by a form. This `is_valid()`
        # call is primarily to reduce the fields down to be able to work with the `cleaned_data` from the
        # filterset form, but will also catch errors in case a user-created dict is provided instead.
        if not filterset_form.is_valid():
            raise ValidationError(filterset_form.errors)

        # Perform some type coercions so that they are URL-friendly and reversible, excluding any
        # empty/null value fields.
        new_filter = {}
        for field_name in filter_fields:
            field = filterset_form.fields[field_name]
            field_value = filterset_form.cleaned_data[field_name]

            # Model-choice fields are stored as their `to_field_name` (or pk) so the stored
            # filter round-trips through JSON and URLs.
            if isinstance(field, forms.ModelMultipleChoiceField):
                field_to_query = field.to_field_name or "pk"
                new_value = [getattr(item, field_to_query) for item in field_value]

            elif isinstance(field, forms.ModelChoiceField):
                field_to_query = field.to_field_name or "pk"
                new_value = getattr(field_value, field_to_query, None)

            else:
                new_value = field_value

            # Don't store empty values like `None`, [], etc.
            if new_value in (None, "", [], {}):
                logger.debug("[%s] Not storing empty value (%s) for %s", self.name, field_value, field_name)
                continue

            logger.debug("[%s] Setting filter field {%s: %s}", self.name, field_name, field_value)
            new_filter[field_name] = new_value

        self.filter = new_filter

    # FIXME(jathan): Yes, this is "something", but there is discrepancy between explicitly declared
    # fields on `DeviceFilterForm` (for example) vs. the `DeviceFilterSet` filters. For example
    # `Device.name` becomes a `MultiValueCharFilter` that emits a `MultiValueCharField` which
    # expects a list of strings as input. The inverse is not true. It's easier to munge this
    # dictionary when we go to send it to the form, than it is to dynamically coerce the form field
    # types coming and going... For now.
    def get_initial(self):
        """
        Return a form-friendly version of `self.filter` for initial form data.

        This is intended for use to populate the dynamically-generated filter form created by
        `generate_filter_form()`.
        """
        filter_fields = self.get_filter_fields()
        initial_data = self.filter.copy()

        # Brute force to capture the names of any `*CharField` fields.
        char_fields = [f for (f, ftype) in filter_fields.items() if ftype.__class__.__name__.endswith("CharField")]

        # Iterate the char fields and coerce their type to a singular value or
        # an empty string in the case of an empty list.
        for char_field in char_fields:
            if char_field not in initial_data:
                continue

            field_value = initial_data[char_field]

            if isinstance(field_value, list):
                # Either the first (and should be only) item in this list.
                if field_value:
                    new_value = field_value[0]
                # Or empty string if there isn't.
                else:
                    new_value = ""
                initial_data[char_field] = new_value

        return initial_data

    def generate_filter_form(self):
        """
        Generate a `FilterForm` class for use in `DynamicGroup` edit view.

        This form is used to populate and validate the filter dictionary.

        If a form cannot be created for some reason (such as on a new instance when rendering the UI
        "add" view), this will return `None`.
        """
        filter_fields = self.get_filter_fields()

        # FIXME(jathan): Account for field_order in the newly generated class.
        try:

            class FilterForm(self.filterform_class):
                prefix = "filter"

                def __init__(self, *args, **kwargs):
                    super().__init__(*args, **kwargs)
                    # Restrict the rendered fields to exactly those this group supports.
                    self.fields = filter_fields

        # `filterform_class` may be None (no content_type yet) — subclassing then raises.
        except (AttributeError, TypeError):
            return None

        return FilterForm

    def clean_filter(self):
        """Clean for `self.filter` that uses the filterset_class to validate."""
        if not isinstance(self.filter, dict):
            raise ValidationError({"filter": "Filter must be a dict"})

        # Accessing `self.model` will determine if the `content_type` is not correctly set, blocking validation.
        if self.model is None:
            raise ValidationError({"filter": "Filter requires a `content_type` to be set"})

        # Validate against the filterset's internal form validation.
        filterset = self.filterset_class(self.filter)
        if not filterset.is_valid():
            raise ValidationError(filterset.errors)

    def clean(self):
        """Model-level validation: `content_type` is immutable after creation; `filter` must validate."""
        super().clean()

        if self.present_in_database:
            # Check immutable fields
            database_object = self.__class__.objects.get(pk=self.pk)

            if self.content_type != database_object.content_type:
                raise ValidationError({"content_type": "ContentType cannot be changed once created"})

        # Validate `filter` dict
        self.clean_filter()
class Site(PrimaryModel, StatusModel):
    """
    A Site represents a geographic location within a network; typically a building or campus. The optional facility
    field can be used to include an external designation, such as a data center name (e.g. Equinix SV6).
    """

    name = models.CharField(max_length=100, unique=True)
    # Shadow field providing natural sort order for `name` (e.g. "Site 2" before "Site 10").
    _name = NaturalOrderingField(target_field="name", max_length=100, blank=True, db_index=True)
    slug = AutoSlugField(populate_from="name")
    region = models.ForeignKey(
        to="dcim.Region",
        on_delete=models.SET_NULL,
        related_name="sites",
        blank=True,
        null=True,
    )
    tenant = models.ForeignKey(
        to="tenancy.Tenant",
        on_delete=models.PROTECT,
        related_name="sites",
        blank=True,
        null=True,
    )
    facility = models.CharField(max_length=50, blank=True, help_text="Local facility ID or description")
    asn = ASNField(
        blank=True,
        null=True,
        verbose_name="ASN",
        help_text="32-bit autonomous system number",
    )
    time_zone = TimeZoneField(blank=True)
    description = models.CharField(max_length=200, blank=True)
    physical_address = models.CharField(max_length=200, blank=True)
    shipping_address = models.CharField(max_length=200, blank=True)
    # max_digits/decimal_places differ between latitude (±90) and longitude (±180).
    latitude = models.DecimalField(
        max_digits=8,
        decimal_places=6,
        blank=True,
        null=True,
        help_text="GPS coordinate (latitude)",
    )
    longitude = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        blank=True,
        null=True,
        help_text="GPS coordinate (longitude)",
    )
    contact_name = models.CharField(max_length=50, blank=True)
    contact_phone = models.CharField(max_length=20, blank=True)
    contact_email = models.EmailField(blank=True, verbose_name="Contact E-mail")
    comments = models.TextField(blank=True)
    # Reverse generic relation so image attachments are reachable from the Site.
    images = GenericRelation(to="extras.ImageAttachment")

    csv_headers = [
        "name",
        "slug",
        "status",
        "region",
        "tenant",
        "facility",
        "asn",
        "time_zone",
        "description",
        "physical_address",
        "shipping_address",
        "latitude",
        "longitude",
        "contact_name",
        "contact_phone",
        "contact_email",
        "comments",
    ]
    clone_fields = [
        "status",
        "region",
        "tenant",
        "facility",
        "asn",
        "time_zone",
        "description",
        "physical_address",
        "shipping_address",
        "latitude",
        "longitude",
        "contact_name",
        "contact_phone",
        "contact_email",
    ]

    class Meta:
        # Order by the natural-ordering shadow field, not the raw name.
        ordering = ("_name",)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("dcim:site", args=[self.slug])

    def to_csv(self):
        """Return a tuple of field values matching `csv_headers`, for CSV export."""
        return (
            self.name,
            self.slug,
            self.get_status_display(),
            self.region.name if self.region else None,
            self.tenant.name if self.tenant else None,
            self.facility,
            self.asn,
            self.time_zone,
            self.description,
            self.physical_address,
            self.shipping_address,
            self.latitude,
            self.longitude,
            self.contact_name,
            self.contact_phone,
            self.contact_email,
            self.comments,
        )
class RackGroup(MPTTModel, OrganizationalModel):
    """
    An arbitrary grouping of Racks, scoped to a single Site.

    What a group represents depends on how Sites are modeled: for a campus-sized
    Site a group might be one building; for a building-sized Site it might be a
    single room or floor. Groups nest via the MPTT `parent` tree.
    """

    name = models.CharField(max_length=100, db_index=True)
    # TODO: Remove unique=None to make slug globally unique. This would be a breaking change.
    slug = AutoSlugField(populate_from="name", unique=None, db_index=True)
    site = models.ForeignKey(to="dcim.Site", on_delete=models.CASCADE, related_name="rack_groups")
    parent = TreeForeignKey(
        to="self",
        on_delete=models.CASCADE,
        related_name="children",
        blank=True,
        null=True,
        db_index=True,
    )
    description = models.CharField(max_length=200, blank=True)

    objects = TreeManager()

    csv_headers = ["site", "parent", "name", "slug", "description"]

    class Meta:
        ordering = ["site", "name"]
        unique_together = [
            ["site", "name"],
            # TODO: Remove unique_together to make slug globally unique. This would be a breaking change.
            ["site", "slug"],
        ]

    class MPTTMeta:
        order_insertion_by = ["name"]

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("dcim:rackgroup", args=[self.pk])

    def to_csv(self):
        """Return a tuple of field values matching `csv_headers`, for CSV export."""
        parent_name = self.parent.name if self.parent else ""
        return (
            self.site,
            parent_name,
            self.name,
            self.slug,
            self.description,
        )

    def to_objectchange(self, action):
        """Record a change-log entry, omitting MPTT's internal bookkeeping columns."""
        mptt_internal_fields = ["level", "lft", "rght", "tree_id"]
        return super().to_objectchange(action, object_data_exclude=mptt_internal_fields)

    def clean(self):
        """Enforce that a child group lives at the same Site as its parent."""
        super().clean()

        if self.parent is None:
            return
        if self.parent.site == self.site:
            return
        raise ValidationError(f"Parent rack group ({self.parent}) must belong to the same site ({self.site})")
class Relationship(BaseModel, ChangeLoggedModel):
    """A user-defined relationship between two object types, with per-side labels, visibility, and filters."""

    name = models.CharField(max_length=100, unique=True, help_text="Internal relationship name")
    slug = AutoSlugField(populate_from="name")
    description = models.CharField(max_length=200, blank=True)
    type = models.CharField(
        max_length=50,
        choices=RelationshipTypeChoices,
        default=RelationshipTypeChoices.TYPE_MANY_TO_MANY,
        help_text="Cardinality of this relationship",
    )

    #
    # Source
    #
    source_type = models.ForeignKey(
        to=ContentType,
        on_delete=models.CASCADE,
        related_name="source_relationships",
        verbose_name="Source Object",
        limit_choices_to=FeatureQuery("relationships"),
        help_text="The source object type to which this relationship applies.",
    )
    source_label = models.CharField(
        max_length=50,
        blank=True,
        verbose_name="Source Label",
        help_text="Label for related destination objects, as displayed on the source object.",
    )
    source_hidden = models.BooleanField(
        default=False,
        verbose_name="Hide for source object",
        help_text="Hide this relationship on the source object.",
    )
    source_filter = models.JSONField(
        encoder=DjangoJSONEncoder,
        blank=True,
        null=True,
        help_text="Queryset filter matching the applicable source objects of the selected type",
    )

    #
    # Destination
    #
    destination_type = models.ForeignKey(
        to=ContentType,
        on_delete=models.CASCADE,
        related_name="destination_relationships",
        verbose_name="Destination Object",
        limit_choices_to=FeatureQuery("relationships"),
        help_text="The destination object type to which this relationship applies.",
    )
    destination_label = models.CharField(
        max_length=50,
        blank=True,
        verbose_name="Destination Label",
        help_text="Label for related source objects, as displayed on the destination object.",
    )
    destination_hidden = models.BooleanField(
        default=False,
        verbose_name="Hide for destination object",
        help_text="Hide this relationship on the destination object.",
    )
    destination_filter = models.JSONField(
        encoder=DjangoJSONEncoder,
        blank=True,
        null=True,
        help_text="Queryset filter matching the applicable destination objects of the selected type",
    )

    objects = RelationshipManager()

    class Meta:
        ordering = ["name"]

    def __str__(self):
        # Display underscores in the internal name as spaces.
        return self.name.replace("_", " ")

    @property
    def symmetric(self):
        """True if this relationship's two sides are interchangeable (one-to-one or many-to-many symmetric)."""
        return self.type in (
            RelationshipTypeChoices.TYPE_ONE_TO_ONE_SYMMETRIC,
            RelationshipTypeChoices.TYPE_MANY_TO_MANY_SYMMETRIC,
        )

    def get_label(self, side):
        """Return the label for a given side, source or destination.

        If the label is not returned, return the verbose_name_plural of the other object
        """
        if side not in VALID_SIDES:
            raise ValueError(f"side value can only be: {','.join(VALID_SIDES)}")

        # Peer "side" implies symmetric relationship, where source and dest are equivalent
        if side == RelationshipSideChoices.SIDE_PEER:
            side = RelationshipSideChoices.SIDE_SOURCE

        # An explicitly configured label always wins.
        if getattr(self, f"{side}_label"):
            return getattr(self, f"{side}_label")

        if side == RelationshipSideChoices.SIDE_SOURCE:
            destination_model = self.destination_type.model_class()
            if not destination_model:
                # perhaps a plugin was uninstalled?
                return str(self)
            # Pluralize when many destination objects can be related from the source side.
            if self.type in (
                RelationshipTypeChoices.TYPE_MANY_TO_MANY,
                RelationshipTypeChoices.TYPE_MANY_TO_MANY_SYMMETRIC,
                RelationshipTypeChoices.TYPE_ONE_TO_MANY,
            ):
                return destination_model._meta.verbose_name_plural
            else:
                return destination_model._meta.verbose_name
        elif side == RelationshipSideChoices.SIDE_DESTINATION:
            source_model = self.source_type.model_class()
            if not source_model:
                # perhaps a plugin was uninstalled?
                return str(self)
            if self.type in (
                RelationshipTypeChoices.TYPE_MANY_TO_MANY,
                RelationshipTypeChoices.TYPE_MANY_TO_MANY_SYMMETRIC,
            ):
                return source_model._meta.verbose_name_plural
            else:
                return source_model._meta.verbose_name

        return None

    def has_many(self, side):
        """Return True if the given side of the relationship can support multiple objects."""
        if side not in VALID_SIDES:
            raise ValueError(f"side value can only be: {','.join(VALID_SIDES)}")

        if self.type in (
            RelationshipTypeChoices.TYPE_MANY_TO_MANY,
            RelationshipTypeChoices.TYPE_MANY_TO_MANY_SYMMETRIC,
        ):
            return True

        if self.type in (RelationshipTypeChoices.TYPE_ONE_TO_ONE, RelationshipTypeChoices.TYPE_ONE_TO_ONE_SYMMETRIC):
            return False

        # ONE_TO_MANY: only the destination side can hold multiple objects.
        return side == RelationshipSideChoices.SIDE_DESTINATION

    def to_form_field(self, side):
        """
        Return a form field suitable for setting a Relationship's value for an object.
        """
        if side not in VALID_SIDES:
            raise ValueError(f"side value can only be: {','.join(VALID_SIDES)}")

        # The field selects objects on the *opposite* side of the relationship.
        peer_side = RelationshipSideChoices.OPPOSITE[side]

        if peer_side != RelationshipSideChoices.SIDE_PEER:
            object_type = getattr(self, f"{peer_side}_type")
            filters = getattr(self, f"{peer_side}_filter") or {}
        else:
            # Symmetric relationship - source and dest fields are presumed identical, so just use source
            object_type = getattr(self, "source_type")
            filters = getattr(self, "source_filter") or {}

        model_class = object_type.model_class()
        if model_class:
            queryset = model_class.objects.all()
        else:
            # maybe a relationship to a model that no longer exists, such as a removed plugin?
            queryset = None
            field_class = None

        if queryset:
            if self.has_many(peer_side):
                field_class = DynamicModelMultipleChoiceField
            else:
                field_class = DynamicModelChoiceField
            field = field_class(queryset=queryset, query_params=filters)
        else:
            # Fall back to a static multiple-choice widget when the peer model is unavailable.
            field = forms.MultipleChoiceField(widget=widgets.StaticSelect2Multiple)
        field.model = self
        field.required = False
        field.label = self.get_label(side)
        if self.description:
            field.help_text = self.description

        return field

    def clean(self):
        """Validate side filters, symmetric-side consistency, and immutability of type/sides once associated.

        NOTE: for symmetric relationships this method also *mutates* the instance, copying a blank
        label/filter from the populated side to the empty one.
        """
        # Check if source and destination filters are valid
        for side in ["source", "destination"]:
            if not getattr(self, f"{side}_filter"):
                continue

            filter = getattr(self, f"{side}_filter")
            side_model = getattr(self, f"{side}_type").model_class()
            if not side_model:
                # can happen if for example a plugin providing the model was uninstalled
                raise ValidationError({f"{side}_type": "Unable to locate model class"})
            model_name = side_model._meta.label
            if not isinstance(filter, dict):
                raise ValidationError({f"{side}_filter": f"Filter for {model_name} must be a dictionary"})

            filterset_class = get_filterset_for_model(side_model)
            if not filterset_class:
                raise ValidationError(
                    {
                        f"{side}_filter": f"Filters are not supported for {model_name} object (Unable to find a FilterSet)"
                    }
                )
            filterset = filterset_class(filter, side_model.objects.all())

            # Collect all problems for this side before raising, so the user sees everything at once.
            error_messages = []
            if filterset.errors:
                for key in filterset.errors:
                    error_messages.append(f"'{key}': " + ", ".join(filterset.errors[key]))

            filterset_params = set(filterset.get_filters().keys())
            for key in filter.keys():
                if key not in filterset_params:
                    error_messages.append(f"'{key}' is not a valid filter parameter for {model_name} object")

            if error_messages:
                raise ValidationError({f"{side}_filter": error_messages})

        if self.symmetric:
            # For a symmetric relation, source and destination attributes must be equivalent if specified
            error_messages = {}
            if self.source_type != self.destination_type:
                error_messages["destination_type"] = "Must match source_type for a symmetric relationship"
            if self.source_label != self.destination_label:
                # Copy a blank label from the populated side rather than erroring.
                if not self.source_label:
                    self.source_label = self.destination_label
                elif not self.destination_label:
                    self.destination_label = self.source_label
                else:
                    error_messages["destination_label"] = "Must match source_label for a symmetric relationship"
            if self.source_hidden != self.destination_hidden:
                error_messages["destination_hidden"] = "Must match source_hidden for a symmetric relationship"
            if self.source_filter != self.destination_filter:
                # Same mirroring behavior as labels: copy a blank filter from the populated side.
                if not self.source_filter:
                    self.source_filter = self.destination_filter
                elif not self.destination_filter:
                    self.destination_filter = self.source_filter
                else:
                    error_messages["destination_filter"] = "Must match source_filter for a symmetric relationship"
            if error_messages:
                raise ValidationError(error_messages)

        # If the model already exist, ensure that it's not possible to modify the source or destination type
        if self.present_in_database:
            nbr_existing_cras = RelationshipAssociation.objects.filter(relationship=self).count()
            if nbr_existing_cras and self.__class__.objects.get(pk=self.pk).type != self.type:
                raise ValidationError(
                    "Not supported to change the type of the relationship when some associations"
                    " are present in the database, delete all associations first before modifying the type."
                )

            if nbr_existing_cras and self.__class__.objects.get(pk=self.pk).source_type != self.source_type:
                raise ValidationError(
                    "Not supported to change the type of the source object when some associations"
                    " are present in the database, delete all associations first before modifying the source type."
                )

            elif nbr_existing_cras and self.__class__.objects.get(pk=self.pk).destination_type != self.destination_type:
                raise ValidationError(
                    "Not supported to change the type of the destination object when some associations"
                    " are present in the database, delete all associations first before modifying the destination type."
                )
class GitRepository(PrimaryModel):
    """Representation of a Git repository used as an external data source."""

    # Rendered in the UI/API in place of the real token value.
    TOKEN_PLACEHOLDER = "********"

    name = models.CharField(
        max_length=100,
        unique=True,
    )
    slug = AutoSlugField(populate_from="name")

    remote_url = models.URLField(
        max_length=255,
        # For the moment we don't support ssh:// and git:// URLs
        help_text="Only HTTP and HTTPS URLs are presently supported",
        validators=[URLValidator(schemes=["http", "https"])],
    )
    branch = models.CharField(
        max_length=64,
        default="main",
    )

    current_head = models.CharField(
        help_text="Commit hash of the most recent fetch from the selected branch. Used for syncing between workers.",
        max_length=48,
        default="",
        blank=True,
    )

    # Mark field as private so that it doesn't get included in ChangeLogging records!
    _token = encrypt(
        models.CharField(
            max_length=200,
            blank=True,
            default="",
        )
    )

    username = models.CharField(
        max_length=64,
        blank=True,
        default="",
    )

    secrets_group = models.ForeignKey(
        to="extras.SecretsGroup",
        on_delete=models.SET_NULL,
        default=None,
        blank=True,
        null=True,
    )

    # Data content types that this repo is a source of. Valid options are dynamically generated based on
    # the data types registered in registry['datasource_contents'].
    provided_contents = models.JSONField(encoder=DjangoJSONEncoder, default=list, blank=True)

    csv_headers = ["name", "slug", "remote_url", "branch", "secrets_group", "provided_contents"]
    clone_fields = ["remote_url", "secrets_group", "provided_contents"]

    class Meta:
        ordering = ["name"]
        verbose_name = "Git repository"
        verbose_name_plural = "Git repositories"

    def __init__(self, *args, **kwargs):
        # If instantiated from the REST API, the originating Request will be passed as a kwarg:
        self.request = kwargs.pop("request", None)
        super().__init__(*args, **kwargs)

        # Store the initial repo slug and token so we can check for changes on save().
        self.__initial_slug = self.slug
        self.__initial_token = self._token

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse("extras:gitrepository", kwargs={"slug": self.slug})

    def to_csv(self):
        """Return a tuple of field values matching `csv_headers`, for CSV export.

        Bug fix: `secrets_group` was previously omitted, so the returned tuple had five values
        against six `csv_headers` and every exported column after "branch" was misaligned.
        """
        return (
            self.name,
            self.slug,
            self.remote_url,
            self.branch,
            self.secrets_group.name if self.secrets_group else None,
            self.provided_contents,
        )

    @property
    def token_rendered(self):
        """Return a masked placeholder if a token is set, or an em-dash if not."""
        if self._token:
            return self.TOKEN_PLACEHOLDER
        else:
            return "—"

    @property
    def filesystem_path(self):
        """Local filesystem location of this repository's clone, under GIT_ROOT."""
        return os.path.join(settings.GIT_ROOT, self.slug)

    def save(self, *args, trigger_resync=True, **kwargs):
        """Save the repository, then (after commit) rename its local clone and optionally enqueue a resync.

        Args:
            trigger_resync (bool): When True (default), enqueue a pull/refresh of this repository
                after the transaction commits. Requires `self.request` to be set.
        """
        if self.__initial_token and self._token == self.TOKEN_PLACEHOLDER:
            # User edited the repo but did NOT specify a new token value. Make sure we keep the existing value.
            self._token = self.__initial_token

        super().save(*args, **kwargs)

        def on_commit_callback():
            if self.__initial_slug and self.slug != self.__initial_slug:
                # Rename any previously existing repo directory to the new slug.
                # TODO: In a distributed Nautobot deployment, each Django instance and/or RQ worker instance may
                # have its own clone of this repository on its own local filesystem; we need some way to ensure
                # that all such clones are renamed.
                # For now we just rename the one that we have locally and rely on other methods
                # (notably get_jobs()) to clean up other clones as they're encountered.
                if os.path.exists(os.path.join(settings.GIT_ROOT, self.__initial_slug)):
                    os.rename(
                        os.path.join(settings.GIT_ROOT, self.__initial_slug),
                        self.filesystem_path,
                    )

            if trigger_resync:
                # NOTE(review): `assert` is stripped under `python -O`; consider raising an explicit
                # exception here instead. Left as-is since callers may rely on AssertionError.
                assert self.request is not None, "No HTTP request associated with this update!"
                # Imported here to avoid a circular import at module load time.
                from nautobot.extras.datasources import (
                    enqueue_pull_git_repository_and_refresh_data,
                )

                enqueue_pull_git_repository_and_refresh_data(self, self.request)

            # Update cached values so a subsequent save() on this instance compares against the new state.
            self.__initial_token = self._token
            self.__initial_slug = self.slug

        # Defer filesystem rename and resync until the DB transaction has actually committed.
        transaction.on_commit(on_commit_callback)
class DeviceType(PrimaryModel):
    """
    A DeviceType represents a particular make (Manufacturer) and model of device. It specifies rack height and depth, as
    well as high-level functional role(s).

    Each DeviceType can have an arbitrary number of component templates assigned to it, which define console, power, and
    interface objects. For example, a Juniper EX4300-48T DeviceType would have:

      * 1 ConsolePortTemplate
      * 2 PowerPortTemplates
      * 48 InterfaceTemplates

    When a new Device of this type is created, the appropriate console, power, and interface objects (as defined by the
    DeviceType) are automatically created as well.
    """

    manufacturer = models.ForeignKey(to="dcim.Manufacturer", on_delete=models.PROTECT, related_name="device_types")
    model = models.CharField(max_length=100)
    # TODO: Remove unique=None to make slug globally unique. This would be a breaking change.
    slug = AutoSlugField(populate_from="model", unique=None)
    part_number = models.CharField(max_length=50, blank=True, help_text="Discrete part number (optional)")
    u_height = models.PositiveSmallIntegerField(default=1, verbose_name="Height (U)")
    is_full_depth = models.BooleanField(
        default=True,
        verbose_name="Is full depth",
        help_text="Device consumes both front and rear rack faces",
    )
    subdevice_role = models.CharField(
        max_length=50,
        choices=SubdeviceRoleChoices,
        blank=True,
        verbose_name="Parent/child status",
        help_text="Parent devices house child devices in device bays. Leave blank "
        "if this device type is neither a parent nor a child.",
    )
    front_image = models.ImageField(upload_to="devicetype-images", blank=True)
    rear_image = models.ImageField(upload_to="devicetype-images", blank=True)
    comments = models.TextField(blank=True)

    clone_fields = [
        "manufacturer",
        "u_height",
        "is_full_depth",
        "subdevice_role",
    ]

    class Meta:
        ordering = ["manufacturer", "model"]
        unique_together = [
            ["manufacturer", "model"],
            # TODO: Remove unique_together to make slug globally unique. This would be a breaking change.
            ["manufacturer", "slug"],
        ]

    def __str__(self):
        return self.model

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Save a copy of u_height for validation in clean()
        self._original_u_height = self.u_height if self.present_in_database else 1

        # Save references to the original front/rear images so save() can delete stale files
        self._original_front_image = self.front_image if self.present_in_database else None
        self._original_rear_image = self.rear_image if self.present_in_database else None

    def get_absolute_url(self):
        return reverse("dcim:devicetype", args=[self.pk])

    def to_yaml(self):
        """Serialize this DeviceType (including all component templates) to a YAML document string."""
        data = OrderedDict(
            (
                ("manufacturer", self.manufacturer.name),
                ("model", self.model),
                ("slug", self.slug),
                ("part_number", self.part_number),
                ("u_height", self.u_height),
                ("is_full_depth", self.is_full_depth),
                ("subdevice_role", self.subdevice_role),
                ("comments", self.comments),
            )
        )

        # Component templates — only emit each section if any such templates exist.
        if self.consoleporttemplates.exists():
            data["console-ports"] = [
                {
                    "name": c.name,
                    "type": c.type,
                }
                for c in self.consoleporttemplates.all()
            ]
        if self.consoleserverporttemplates.exists():
            data["console-server-ports"] = [
                {
                    "name": c.name,
                    "type": c.type,
                }
                for c in self.consoleserverporttemplates.all()
            ]
        if self.powerporttemplates.exists():
            data["power-ports"] = [
                {
                    "name": c.name,
                    "type": c.type,
                    "maximum_draw": c.maximum_draw,
                    "allocated_draw": c.allocated_draw,
                }
                for c in self.powerporttemplates.all()
            ]
        if self.poweroutlettemplates.exists():
            data["power-outlets"] = [
                {
                    "name": c.name,
                    "type": c.type,
                    "power_port": c.power_port.name if c.power_port else None,
                    "feed_leg": c.feed_leg,
                }
                for c in self.poweroutlettemplates.all()
            ]
        if self.interfacetemplates.exists():
            data["interfaces"] = [
                {
                    "name": c.name,
                    "type": c.type,
                    "mgmt_only": c.mgmt_only,
                }
                for c in self.interfacetemplates.all()
            ]
        if self.frontporttemplates.exists():
            data["front-ports"] = [
                {
                    "name": c.name,
                    "type": c.type,
                    "rear_port": c.rear_port.name,
                    "rear_port_position": c.rear_port_position,
                }
                for c in self.frontporttemplates.all()
            ]
        if self.rearporttemplates.exists():
            data["rear-ports"] = [
                {
                    "name": c.name,
                    "type": c.type,
                    "positions": c.positions,
                }
                for c in self.rearporttemplates.all()
            ]
        if self.devicebaytemplates.exists():
            data["device-bays"] = [
                {
                    "name": c.name,
                }
                for c in self.devicebaytemplates.all()
            ]

        return yaml.dump(dict(data), sort_keys=False)

    def clean(self):
        """Validate u_height changes against existing racked instances and parent/child consistency.

        Raises:
            ValidationError: If growing u_height would not fit existing racked devices, if shrinking
                to 0U while instances are racked, if declassifying a parent that still has device bay
                templates, or if a child device type has a nonzero height.
        """
        super().clean()

        # If editing an existing DeviceType to have a larger u_height, first validate that *all* instances of it have
        # room to expand within their racks. This validation will impose a very high performance penalty when there are
        # many instances to check, but increasing the u_height of a DeviceType should be a very rare occurrence.
        if self.present_in_database and self.u_height > self._original_u_height:
            for d in Device.objects.filter(device_type=self, position__isnull=False):
                face_required = None if self.is_full_depth else d.face
                u_available = d.rack.get_available_units(
                    u_height=self.u_height, rack_face=face_required, exclude=[d.pk]
                )
                if d.position not in u_available:
                    raise ValidationError(
                        {
                            "u_height": "Device {} in rack {} does not have sufficient space to accommodate a height of "
                            "{}U".format(d, d.rack, self.u_height)
                        }
                    )

        # If modifying the height of an existing DeviceType to 0U, check for any instances assigned to a rack position.
        elif self.present_in_database and self._original_u_height > 0 and self.u_height == 0:
            racked_instance_count = Device.objects.filter(device_type=self, position__isnull=False).count()
            if racked_instance_count:
                # BUGFIX: query parameter was previously misspelled as "manufactuer_id",
                # so the linked device list silently ignored the manufacturer filter.
                url = f"{reverse('dcim:device_list')}?manufacturer_id={self.manufacturer_id}&device_type_id={self.pk}"
                raise ValidationError(
                    {
                        "u_height": mark_safe(
                            f'Unable to set 0U height: Found <a href="{url}">{racked_instance_count} instances</a> already '
                            f"mounted within racks."
                        )
                    }
                )

        if (self.subdevice_role != SubdeviceRoleChoices.ROLE_PARENT) and self.devicebaytemplates.count():
            raise ValidationError(
                {
                    "subdevice_role": "Must delete all device bay templates associated with this device before "
                    "declassifying it as a parent device."
                }
            )

        if self.u_height and self.subdevice_role == SubdeviceRoleChoices.ROLE_CHILD:
            raise ValidationError({"u_height": "Child device types must be 0U."})

    def save(self, *args, **kwargs):
        ret = super().save(*args, **kwargs)

        # Delete any previously uploaded image files that are no longer in use
        if self._original_front_image and self.front_image != self._original_front_image:
            self._original_front_image.delete(save=False)
        if self._original_rear_image and self.rear_image != self._original_rear_image:
            self._original_rear_image.delete(save=False)

        return ret

    def delete(self, *args, **kwargs):
        super().delete(*args, **kwargs)

        # Delete any uploaded image files
        if self.front_image:
            self.front_image.delete(save=False)
        if self.rear_image:
            self.rear_image.delete(save=False)

    @property
    def display(self):
        return f"{self.manufacturer.name} {self.model}"

    @property
    def is_parent_device(self):
        return self.subdevice_role == SubdeviceRoleChoices.ROLE_PARENT

    @property
    def is_child_device(self):
        return self.subdevice_role == SubdeviceRoleChoices.ROLE_CHILD