class ResourceMetadataInRDF(BaseModel):
    rdf_subject: RDFIdentifier = Field(default_factory=hs_uid)
    _parse_rdf_subject = root_validator(pre=True, allow_reuse=True)(rdf_parse_rdf_subject)

    rdf_type: AnyUrl = Field(rdf_predicate=RDF.type, const=True, default=HSTERMS.CompositeResource)

    label: str = Field(default="Composite Resource", const=True)

    title: str = Field(rdf_predicate=DC.title)
    description: DescriptionInRDF = Field(rdf_predicate=DC.description, default_factory=DescriptionInRDF)
    language: str = Field(rdf_predicate=DC.language, default='eng')
    subjects: List[str] = Field(rdf_predicate=DC.subject, default=[])
    dc_type: AnyUrl = Field(rdf_predicate=DC.type, default=HSTERMS.CompositeResource, const=True)
    identifier: IdentifierInRDF = Field(rdf_predicate=DC.identifier, const=True)
    creators: List[CreatorInRDF] = Field(rdf_predicate=DC.creator, default=[])
    contributors: List[ContributorInRDF] = Field(rdf_predicate=DC.contributor, default=[])
    relations: List[RelationInRDF] = Field(rdf_predicate=DC.relation, default=[])
    extended_metadata: List[ExtendedMetadataInRDF] = Field(rdf_predicate=HSTERMS.extendedMetadata, default=[])
    rights: RightsInRDF = Field(rdf_predicate=DC.rights, default=None)
    dates: List[DateInRDF] = Field(rdf_predicate=DC.date, default=[])
    awards: List[AwardInfoInRDF] = Field(rdf_predicate=HSTERMS.awardInfo, default=[])
    coverages: List[CoverageInRDF] = Field(rdf_predicate=DC.coverage, default=[])
    publisher: PublisherInRDF = Field(rdf_predicate=DC.publisher, default=None)
    citation: str = Field(rdf_predicate=DCTERMS.bibliographicCitation)

    _parse_coverages = root_validator(pre=True, allow_reuse=True)(parse_coverages)
    _parse_extended_metadata = root_validator(pre=True, allow_reuse=True)(parse_rdf_extended_metadata)
    _parse_rdf_dates = root_validator(pre=True, allow_reuse=True)(parse_rdf_dates)
    _parse_description = root_validator(pre=True, allow_reuse=True)(rdf_parse_description)
    _parse_identifier = validator("identifier", pre=True, allow_reuse=True)(rdf_parse_identifier)

    _language_constraint = validator('language', allow_reuse=True)(language_constraint)
    _dates_constraint = validator('dates', allow_reuse=True)(dates_constraint)
    _coverages_constraint = validator('coverages', allow_reuse=True)(coverages_constraint)
    _coverages_spatial_constraint = validator('coverages', allow_reuse=True)(coverages_spatial_constraint)
    _sort_creators = validator("creators", pre=True)(sort_creators)

class BaseAggregationMetadataInRDF(RDFBaseModel):
    _parse_rdf_subject = root_validator(pre=True, allow_reuse=True)(rdf_parse_rdf_subject)

    title: str = Field(rdf_predicate=DC.title)
    subjects: List[str] = Field(rdf_predicate=DC.subject, default=[])
    language: str = Field(rdf_predicate=DC.language, default="eng")
    extended_metadata: List[ExtendedMetadataInRDF] = Field(rdf_predicate=HSTERMS.extendedMetadata, default=[])
    coverages: List[CoverageInRDF] = Field(rdf_predicate=DC.coverage, default=[])
    rights: RightsInRDF = Field(rdf_predicate=DC.rights, default=[])

    _parse_coverages = root_validator(pre=True, allow_reuse=True)(parse_coverages)
    _parse_extended_metadata = root_validator(pre=True, allow_reuse=True)(parse_rdf_extended_metadata)

class Link(Extensible, BaseModel):
    operationRef: Optional[str] = None
    operationId: Optional[str] = None
    parameters: Dict[str, Any] = Field({})
    requestBody: Optional[str] = None
    description: str = ""
    server: Optional[Server] = None
    chainId: Optional[str] = Field(None, alias="x-apigraph-chainId")
    requestBodyParameters: Dict[str, str] = Field(
        {}, alias="x-apigraph-requestBodyParameters"
    )

    check_request_body_ = root_validator(check_request_body, allow_reuse=True)

    @root_validator
    def check_operation_identifier(cls, values):
        operationRef = values.get("operationRef")
        operationId = values.get("operationId")
        if operationRef and operationId:
            raise ValueError(
                "`operationRef` and `operationId` are mutually-exclusive"
            )
        if not any((operationRef, operationId)):
            raise ValueError(
                "One-of `operationRef` or `operationId` are required"
            )
        return values

class Skin(BaseModel):
    class MetaData(BaseModel):
        model: Literal['default', 'slim'] = 'default'

    url: Optional[str]
    hash: Optional[str] = None
    metadata: Optional[MetaData] = MetaData(model='default')

    _hash = root_validator(pre=True, allow_reuse=True)(make_hash)

class TimeSeriesMetadataIn(BaseAggregationMetadataIn):
    """
    A class used to represent the metadata associated with a time series aggregation

    A time series aggregation consists of one or more time series datasets to which aggregation-level metadata
    have been added. Time series datasets in HydroShare consist of sequences of individual data values that are
    ordered in time to record the changing trend of a certain phenomenon. They are stored in HydroShare using
    ODM2 SQLite database files.
    """

    class Config:
        title = 'Time Series Aggregation Metadata'

        schema_config = {'read_only': ['type', 'url'], 'dictionary_field': ['additional_metadata']}

    time_series_results: List[TimeSeriesResult] = Field(
        default=[],
        title="Time series results",
        description="A list of time series results contained within the time series aggregation",
    )

    abstract: str = Field(default=None, title="Abstract", description="A string containing a summary of a aggregation")

    _parse_abstract = root_validator(pre=True, allow_reuse=True)(parse_abstract)

def create_ensemble_config(
    plugin_registry: ConfigPluginRegistry,
) -> Type[EnsembleConfig]:
    """Return a :class:`EnsembleConfig` with plugged-in configurations."""
    discriminator = plugin_registry.get_descriminator("transformation")
    default_discriminator = plugin_registry.get_default_for_category("transformation")

    input_config = create_plugged_model(
        model_name="PluggedEnsembleInput",
        categories=["transformation"],
        plugin_registry=plugin_registry,
        model_base=EnsembleInput,
        model_module=__name__,
        validators={
            "validate_transformation": root_validator(pre=True, allow_reuse=True)(
                _validate_transformation(
                    discriminator=discriminator,
                    default_for_category=default_discriminator,
                )
            )
        },
    )
    ensemble_config = create_model(
        "PluggedEnsembleConfig",
        __base__=EnsembleConfig,
        __module__=__name__,
        input=(Tuple[input_config, ...], ...),
    )
    return ensemble_config

class TimeSeriesResultInRDF(TimeSeriesResult, RDFBaseModel):
    unit: UnitInRDF = Field(rdf_predicate=HSTERMS.unit, default=None)
    site: TimeSeriesSiteInRDF = Field(rdf_predicate=HSTERMS.site)
    variable: TimeSeriesVariableInRDF = Field(rdf_predicate=HSTERMS.variable)
    method: TimeSeriesMethodInRDF = Field(rdf_predicate=HSTERMS.method)
    processing_level: ProcessingLevelInRDF = Field(rdf_predicate=HSTERMS.processingLevel)
    utc_offset: UTCOffSetInRDF = Field(rdf_predicate=HSTERMS.UTCOffSet, default=None)

    class Config:
        fields = {
            'series_id': {"rdf_predicate": HSTERMS.timeSeriesResultUUID},
            'status': {"rdf_predicate": HSTERMS.Status},
            'sample_medium': {"rdf_predicate": HSTERMS.SampleMedium},
            'value_count': {"rdf_predicate": HSTERMS.ValueCount},
            'aggregation_statistic': {"rdf_predicate": HSTERMS.AggregationStatistic},
            'series_label': {"rdf_predicate": HSTERMS.SeriesLabel},
        }

    _parse_utc_offset = root_validator(pre=True)(rdf_parse_utc_offset)

def create_stages_config(plugin_registry: ConfigPluginRegistry) -> "Type[StagesConfig]":
    """Return a :class:`StagesConfig` with plugged-in configurations."""
    discriminator = plugin_registry.get_descriminator("transformation")
    default_discriminator = plugin_registry.get_default_for_category("transformation")
    stage_io = create_plugged_model(
        model_name="PluggedStageIO",
        categories=["transformation"],
        plugin_registry=plugin_registry,
        model_base=StageIO,
        model_module=__name__,
        validators={
            "discriminator_default": root_validator(pre=True, allow_reuse=True)(
                _default_transformation_discriminator(
                    category="transformation",
                    discriminator=discriminator,
                    default_for_category=default_discriminator,
                )
            ),
        },
    )

    # duck punching _Step to bridge static and dynamic config definitions. StageIO
    # exists only at run-time, but _Step (and subclasses) should be static.
    _Step._stageio_cls = stage_io

    # Returning the StagesConfig class to underline that it needs some dynamic mutation.
    return StagesConfig

def _populate_self_validator():
    def _populate_self(cls, values):
        del cls
        if values.get("self", None) is None:
            values["self"] = values["id"]
        return values

    return pydantic.root_validator(pre=True, allow_reuse=True)(_populate_self)

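# Hypothetical usage sketch (the model and its fields below are assumptions, not from
# the source): the classmethod returned above is assigned in a model body so that a
# missing "self" link defaults to the record's "id" before field validation runs.
import pydantic


class _Record(pydantic.BaseModel):
    id: str
    self: str

    _populate_self = _populate_self_validator()


# _Record.parse_obj({"id": "abc"}).self == "abc"
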
class Y(BaseModel):
    a: Optional[str]
    b: Optional[str]
    c: Optional[str]
    x: int = 1

    _abc_choice_validator = root_validator(allow_reuse=True)(
        choice_of_validator({"a", "b", "c"}, True)
    )

class BaseAggregationMetadataIn(BaseMetadata):
    title: str = Field(
        default=None,
        title="Aggregation title",
        description="A string containing a descriptive title for the aggregation",
    )
    subjects: List[str] = Field(
        default=[],
        title="Subject keywords",
        description="A list of keyword strings expressing the topic of the aggregation",
    )
    language: str = Field(
        default="eng",
        title="Language",
        description="The 3-character string for the language in which the metadata and content are expressed",
    )
    additional_metadata: Dict[str, str] = Field(
        default={},
        title="Extended metadata",
        description="A list of extended metadata elements expressed as key-value pairs",
    )
    spatial_coverage: Union[PointCoverage, BoxCoverage] = Field(
        default=None,
        title="Spatial coverage",
        description="An object containing the geospatial coverage for the aggregation expressed as either a bounding box or point",
    )
    period_coverage: PeriodCoverage = Field(
        default=None,
        title="Temporal coverage",
        description="An object containing the temporal coverage for a aggregation expressed as a date range",
    )

    _parse_additional_metadata = root_validator(pre=True, allow_reuse=True)(parse_additional_metadata)
    _parse_coverages = root_validator(pre=True, allow_reuse=True)(split_coverages)
    _subjects_constraint = validator('subjects', allow_reuse=True)(subjects_constraint)
    _language_constraint = validator('language', allow_reuse=True)(language_constraint)
    _parse_spatial_coverage = validator("spatial_coverage", allow_reuse=True, pre=True)(parse_spatial_coverage)
    _normalize_additional_metadata = root_validator(allow_reuse=True, pre=True)(normalize_additional_metadata)

class ResourceMetadata(ResourceMetadataIn):
    type: str = Field(
        const=True,
        default="CompositeResource",
        title="Resource Type",
        description="An object containing a URL that points to the HydroShare resource type selected from the hsterms namespace",
        allow_mutation=False,
    )

    url: AnyUrl = Field(
        title="URL", description="An object containing the URL for a resource", allow_mutation=False
    )
    identifier: AnyUrl = Field(
        title="Identifier",
        description="An object containing the URL-encoded unique identifier for a resource",
        allow_mutation=False,
    )
    created: datetime = Field(
        default_factory=datetime.now,
        title="Creation date",
        description="A datetime object containing the instant associated with when a resource was created",
        allow_mutation=False,
    )
    modified: datetime = Field(
        default_factory=datetime.now,
        title="Modified date",
        description="A datetime object containing the instant associated with when a resource was last modified",
        allow_mutation=False,
    )
    published: datetime = Field(
        default=None,
        title="Published date",
        description="A datetime object containing the instant associated with when a resource was published",
        allow_mutation=False,
    )

    _parse_dates = root_validator(pre=True, allow_reuse=True)(split_dates)
    _parse_url = root_validator(pre=True, allow_reuse=True)(parse_url)
    _parse_identifier = validator("identifier", pre=True)(parse_identifier)

class CreatorInRDF(RDFBaseModel):
    creator_order: PositiveInt
    name: str = Field(default=None)
    phone: str = Field(default=None)
    address: str = Field(default=None)
    organization: str = Field(default=None)
    email: EmailStr = Field(default=None)
    homepage: HttpUrl = Field(default=None)
    description: str = Field(max_length=50, default=None)
    ORCID: AnyUrl = Field(default=None)
    google_scholar_id: AnyUrl = Field(default=None)
    research_gate_id: AnyUrl = Field(default=None)

    _group_identifiers = root_validator(pre=True, allow_reuse=True)(split_user_identifiers)

    class Config:
        fields = {
            'name': {"rdf_predicate": HSTERMS.name},
            'creator_order': {"rdf_predicate": HSTERMS.creatorOrder},
            'google_scholar_id': {"rdf_predicate": HSTERMS.GoogleScholarID},
            'research_gate_id': {"rdf_predicate": HSTERMS.ResearchGateID},
            'phone': {"rdf_predicate": HSTERMS.phone},
            'ORCID': {"rdf_predicate": HSTERMS.ORCID},
            'address': {"rdf_predicate": HSTERMS.address},
            'organization': {"rdf_predicate": HSTERMS.organization},
            'email': {"rdf_predicate": HSTERMS.email},
            'homepage': {"rdf_predicate": HSTERMS.homepage},
            'description': {"rdf_predicate": HSTERMS.description},
        }

def required_group(props: Set[str]):
    """Require at least one of the properties to be present."""

    def _require_group(_model, values: dict):
        defined_props = props & {
            key for key, value in values.items() if value is not None
        }
        if len(defined_props) < 1:
            raise ValueError(
                "At least one of {} was required; none found".format(props)
            )
        return values

    return root_validator(allow_reuse=True)(_require_group)

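# Hypothetical usage sketch (model and field names are assumptions): the generated
# root validator is embedded so a record must carry at least one contact property.
from typing import Optional

from pydantic import BaseModel


class _Contact(BaseModel):
    email: Optional[str] = None
    phone: Optional[str] = None

    _at_least_one = required_group({"email", "phone"})


# _Contact() raises a ValidationError; _Contact(email="a@b.org") validates.
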
class GeographicFeatureMetadataInRDF(BaseAggregationMetadataInRDF):
    rdf_type: AnyUrl = Field(rdf_predicate=RDF.type, const=True, default=HSTERMS.GeographicFeatureAggregation)

    label: str = Field(
        const=True,
        default="Geographic Feature Content: The multiple files that are part of a "
        "geographic shapefile",
    )
    dc_type: AnyUrl = Field(rdf_predicate=DC.type, default=HSTERMS.GeographicFeatureAggregation, const=True)

    field_information: List[FieldInformationInRDF] = Field(rdf_predicate=HSTERMS.FieldInformation, default=[])
    geometry_information: GeometryInformationInRDF = Field(rdf_predicate=HSTERMS.GeometryInformation)
    spatial_reference: SpatialReferenceInRDF = Field(rdf_predicate=HSTERMS.spatialReference, default=None)

    _parse_spatial_reference = root_validator(pre=True, allow_reuse=True)(parse_rdf_spatial_reference)

class TimeSeriesMetadataInRDF(BaseAggregationMetadataInRDF):
    rdf_type: AnyUrl = Field(rdf_predicate=RDF.type, const=True, default=HSTERMS.TimeSeriesAggregation)

    label: str = Field(
        const=True,
        default="Time Series Content: One or more time series held in an ODM2 format "
        "SQLite file and optional source comma separated (.csv) files",
    )
    dc_type: AnyUrl = Field(rdf_predicate=DC.type, default=HSTERMS.TimeSeriesAggregation, const=True)

    description: DescriptionInRDF = Field(rdf_predicate=DC.description, default_factory=DescriptionInRDF)
    time_series_results: List[TimeSeriesResultInRDF] = Field(rdf_predicate=HSTERMS.timeSeriesResult, default=[])

    _parse_description = root_validator(pre=True, allow_reuse=True)(rdf_parse_description)

class GeographicRasterMetadataInRDF(BaseAggregationMetadataInRDF):
    rdf_type: AnyUrl = Field(rdf_predicate=RDF.type, const=True, default=HSTERMS.GeographicRasterAggregation)

    label: str = Field(
        const=True,
        default="Geographic Raster Content: A geographic grid represented by a virtual "
        "raster tile (.vrt) file and one or more geotiff (.tif) files",
    )
    dc_type: AnyUrl = Field(rdf_predicate=DC.type, default=HSTERMS.GeographicRasterAggregation, const=True)

    band_information: BandInformationInRDF = Field(rdf_predicate=HSTERMS.BandInformation)
    spatial_reference: SpatialReferenceInRDF = Field(rdf_predicate=HSTERMS.spatialReference, default=None)
    cell_information: CellInformationInRDF = Field(rdf_predicate=HSTERMS.CellInformation)

    _parse_spatial_reference = root_validator(pre=True, allow_reuse=True)(parse_rdf_spatial_reference)

class MultidimensionalMetadataInRDF(BaseAggregationMetadataInRDF):
    rdf_type: AnyUrl = Field(rdf_predicate=RDF.type, const=True, default=HSTERMS.MultidimensionalAggregation)

    label: str = Field(
        const=True,
        default="Multidimensional Content: A multidimensional dataset represented by a "
        "NetCDF file (.nc) and text file giving its NetCDF header content",
    )
    dc_type: AnyUrl = Field(rdf_predicate=DC.type, default=HSTERMS.MultidimensionalAggregation, const=True)

    variables: List[VariableInRDF] = Field(rdf_predicate=HSTERMS.Variable, default=[])
    spatial_reference: MultidimensionalSpatialReferenceInRDF = Field(
        rdf_predicate=HSTERMS.spatialReference, default=None
    )

    _parse_spatial_reference = root_validator(pre=True, allow_reuse=True)(parse_rdf_multidimensional_spatial_reference)

class Relation(BaseMetadata):
    """
    A class used to represent the metadata associated with a resource related to the resource being described
    """

    class Config:
        title = 'Related Resource Metadata'

    type: RelationType = Field(
        title="Relation type", description="The type of relationship with the related resource"
    )
    value: str = Field(
        max_length=500,
        title="Value",
        description="String expressing the Full text citation, URL link for, or description of the related resource",
    )

    _parse_relation = root_validator(pre=True)(parse_relation)

def _get_oneof_setter(oneof_fields: List, oneof_key: str) -> Callable:
    """
    Pydantic root validator (post) classmethod generator to set the oneof key

    :param oneof_fields: list of field names for oneof
    :type oneof_fields: List
    :param oneof_key: oneof key
    :type oneof_key: str
    :return: classmethod for setting oneof fields in Pydantic models
    """

    def oneof_setter(cls, values):
        for oneof_field in oneof_fields:
            if values[oneof_field] == cls.__fields__[oneof_field].default:
                values.pop(oneof_field)
        return values

    oneof_setter.__qualname__ = 'set_' + oneof_key
    return root_validator(pre=False, allow_reuse=True)(oneof_setter)

def make_validator(
    func: Callable, key: str, *, opts: Dict = {}, **params
) -> _ValidatorMap_t:
    """Create a validator classmethod by wrapping a function in a closure

    Parameters
    ----------
    func : Callable
        Function to use as a validator.  The function should conform to the
        following argspec:

        FullArgSpec(args=['cls', 'val', 'values'], varargs=None, kwonlyargs=[...])

    key : str
        Key to associate the validator with.  If key is "falsy" (empty string,
        None, etc), a root_validator is created instead.

    opts : Dict
        Keyword arguments passed on to the pydantic `validator`/`root_validator`
        decorator (e.g. `pre`, `allow_reuse`).

    **params
        Keyword arguments to be passed on to the validator function `func`

    Returns
    -------
    _ValidatorMap_t
        A dictionary with the validator function wrapped as a classmethod

        {'function_name': classmethod(func)}

    """
    # NOTE: cannot use functools.partial because pydantic does very restrictive
    # function signature checks.  The module and qualified names are also
    # expected to be set.
    # wrapper = partial(func, **params)
    def wrapper(cls, val, values):
        return func(cls, val, values, **params)

    # pydantic keeps a global registry of all validators as <module>.<name>,
    # and prevents reuse unless explicitly overridden with allow_reuse=True
    wrapper.__module__ = f"<dynamic-{uuid4().hex}>"
    wrapper.__qualname__ = func.__name__

    decorator = validator(key, **opts) if key else root_validator(**opts)
    return {func.__name__: decorator(wrapper)}

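# Hypothetical usage sketch (the model, field and helper below are assumptions): the
# returned mapping is intended to be fed to pydantic.create_model via __validators__.
from pydantic import create_model


def _at_most(cls, val, values, *, limit):
    # extra keyword arguments (here `limit`) are bound by the closure in make_validator
    if val > limit:
        raise ValueError(f"value must be <= {limit}")
    return val


_Bounded = create_model(
    "_Bounded",
    value=(int, ...),
    __validators__=make_validator(_at_most, "value", limit=10),
)

# _Bounded(value=3) validates; _Bounded(value=42) raises a ValidationError.
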
def _get_tags_updater() -> Callable:
    """
    Pydantic root validator (pre) classmethod generator to update tags

    :return: classmethod for updating tags in DocumentProto Pydantic model
    """

    def tags_updater(cls, values):
        extra_fields = {
            k: values[k] for k in set(values).difference(cls.__fields__)
        }
        if extra_fields:
            if 'tags' not in values:
                values['tags'] = {}
            if isinstance(values['tags'], Dict):
                values['tags'].update(extra_fields)
        return values

    return root_validator(pre=True, allow_reuse=True)(tags_updater)

def _get_tags_updater() -> Callable:
    """
    Pydantic root validator (pre) classmethod generator to update tags

    :return: classmethod for updating tags in DocumentProto Pydantic model
    """

    def tags_updater(cls, values):
        extra_fields = {
            k: values[k] for k in set(values).difference(cls.__fields__)
        }
        if extra_fields:
            if 'tags' not in values:
                values['tags'] = cls.__fields__['tags'].default
            if isinstance(values['tags'], Dict):
                values['tags'].update({i: j for i, j in extra_fields.items()})
        return values

    return root_validator(pre=True, allow_reuse=True)(tags_updater)

def _get_oneof_validator(oneof_fields: List, oneof_key: str) -> Callable:
    """
    Pydantic root validator (pre) classmethod generator to confirm only one oneof field is passed

    :param oneof_fields: list of field names for oneof
    :type oneof_fields: List
    :param oneof_key: oneof key
    :type oneof_key: str
    :return: classmethod for validating oneof fields
    """

    def oneof_validator(cls, values):
        if len(set(oneof_fields).intersection(set(values))) > 1:
            raise ValueError(
                f'only one field among {oneof_fields} can be set for key {oneof_key}!'
            )
        return values

    oneof_validator.__qualname__ = 'validate_' + oneof_key
    return root_validator(pre=True, allow_reuse=True)(oneof_validator)

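# Hypothetical usage sketch (field names are assumptions): the pre-validator above and
# the post-setter from _get_oneof_setter are wired into a dynamically created model.
from pydantic import create_model

_OneofDoc = create_model(
    "_OneofDoc",
    text=(str, ""),
    blob=(bytes, b""),
    __validators__={
        "validate_content": _get_oneof_validator(["text", "blob"], "content"),
        "set_content": _get_oneof_setter(["text", "blob"], "content"),
    },
)

# Passing both `text` and `blob` raises a ValueError from the pre-validator; after
# validation the setter drops whichever oneof field is still at its default value.
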
class Backlink(Extensible, BaseModel):
    responseRef: Optional[str] = None
    operationRef: Optional[str] = None
    operationId: Optional[str] = None
    response: Optional[str] = None
    chainId: Optional[str] = None
    parameters: Dict[str, Any] = Field({})
    requestBody: Optional[str] = None
    requestBodyParameters: Dict[str, str] = Field({})
    description: str = ""
    server: Optional[Server] = None

    check_request_body_ = root_validator(check_request_body, allow_reuse=True)

    @root_validator
    def check_response_identifier(cls, values):
        responseRef = values.get("responseRef")
        operationRef = values.get("operationRef")
        operationId = values.get("operationId")
        response = values.get("response")
        if (
            responseRef and any((operationRef, operationId, response))
            or operationRef and any((responseRef, operationId))
            or operationId and any((responseRef, operationRef))
        ):
            raise ValueError(
                "`responseRef`, `operationRef`, `operationId` and `response` are mutually-exclusive"
            )
        if (operationId or operationRef) and not response:
            raise ValueError(
                "`response` is required when `operationRef` or `operationId` are specified"
            )
        if response and not (operationId or operationRef):
            raise ValueError(
                "`operationRef` or `operationId` are required when `response` is specified"
            )
        if not any((responseRef, operationRef, operationId)):
            raise ValueError(
                "One-of `responseRef`, `operationRef` or `operationId` are required"
            )
        return values

class ModelProgramMetadataInRDF(BaseAggregationMetadataInRDF):
    rdf_type: AnyUrl = Field(rdf_predicate=RDF.type, const=True, default=HSTERMS.ModelProgramAggregation)

    label: str = Field(const=True, default="Model Program Content: One or more files with specific metadata")
    dc_type: AnyUrl = Field(rdf_predicate=DC.type, default=HSTERMS.ModelProgramAggregation, const=True)

    name: str = Field(rdf_predicate=HSTERMS.modelProgramName, default=None)
    version: str = Field(rdf_predicate=HSTERMS.modelVersion, default=None)
    programming_languages: List[str] = Field(rdf_predicate=HSTERMS.modelProgramLanguage, default=[])
    operating_systems: List[str] = Field(rdf_predicate=HSTERMS.modelOperatingSystem, default=[])
    release_date: date = Field(rdf_predicate=HSTERMS.modelReleaseDate, default=None)
    website: AnyUrl = Field(rdf_predicate=HSTERMS.modelWebsite, default=None)
    code_repository: AnyUrl = Field(rdf_predicate=HSTERMS.modelCodeRepository, default=None)
    program_schema_json: AnyUrl = Field(rdf_predicate=HSTERMS.modelProgramSchema, default=None)

    release_notes: List[str] = Field(rdf_predicate=HSTERMS.modelReleaseNotes, default=[])
    documentation: List[str] = Field(rdf_predicate=HSTERMS.modelDocumentation, default=[])
    software: List[str] = Field(rdf_predicate=HSTERMS.modelSoftware, default=[])
    engine: List[str] = Field(rdf_predicate=HSTERMS.modelEngine, default=[])

    _parse_file_types = root_validator(pre=True, allow_reuse=True)(rdf_parse_file_types)

class ModelInstanceMetadata(ModelInstanceMetadataIn):
    type: AggregationType = Field(
        const=True,
        default=AggregationType.ModelInstanceAggregation,
        title="Aggregation type",
        description="A string expressing the aggregation type from the list of HydroShare aggregation types",
        allow_mutation=False,
    )

    url: AnyUrl = Field(
        title="Aggregation URL", description="An object containing the URL of the aggregation", allow_mutation=False
    )
    rights: Rights = Field(
        default=None,
        title="Rights statement",
        description="An object containing information about the rights held in and over the aggregation and the license under which a aggregation is shared",
    )

    _parse_url = root_validator(pre=True, allow_reuse=True)(parse_url)

def _get_friction_root_validator(
    frictionid_attr: str,
    frictiontype_attr: str,
    frictionvalue_attr: str,
):
    """
    Make a root_validator that verifies whether the crosssection definition
    (subclass) has a valid friction specification.
    Supposed to be embedded in subclasses for their friction fields.

    Args:
        frictionid_attr: name of the frictionid attribute in the subclass.
        frictiontype_attr: name of the frictiontype attribute in the subclass.
        frictionvalue_attr: name of the frictionvalue attribute in the subclass.

    Returns:
        root_validator: to be embedded in the subclass that needs it.
    """

    def validate_friction_specification(cls, values):
        """
        The actual validator function.

        Args:
            cls: The subclass for which the root_validator is called.
            values (dict): Dictionary of values to create a CrossSectionDefinition subclass.
        """
        frictionid = values.get(frictionid_attr) or ""
        frictiontype = values.get(frictiontype_attr) or ""
        frictionvalue = values.get(frictionvalue_attr) or ""

        if frictionid != "":
            if frictiontype != "" or frictionvalue != "":
                raise ValueError(
                    f"Cross section has duplicate friction specification (both {frictionid_attr} and {frictiontype_attr}/{frictionvalue_attr})."
                )

        return values

    return root_validator(allow_reuse=True)(validate_friction_specification)

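# Hypothetical usage sketch (class and attribute names are assumptions): the generated
# validator is embedded in a cross-section definition that carries friction fields.
from typing import Optional

from pydantic import BaseModel


class _CrossSectionDef(BaseModel):
    frictionid: Optional[str] = None
    frictiontype: Optional[str] = None
    frictionvalue: Optional[str] = None

    _friction_spec = _get_friction_root_validator(
        "frictionid", "frictiontype", "frictionvalue"
    )


# Supplying frictionid together with frictiontype or frictionvalue raises the
# duplicate-specification error; any single style of specification validates.
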
class RelationInRDF(RDFBaseModel):
    isExecutedBy: str = Field(rdf_predicate=HSTERMS.isExecutedBy, default=None)
    isCreatedBy: str = Field(rdf_predicate=HSTERMS.isCreatedBy, default=None)
    isDescribedBy: str = Field(rdf_predicate=HSTERMS.isDescribedBy, default=None)
    isPartOf: str = Field(rdf_predicate=DCTERMS.isPartOf, default=None)
    hasPart: str = Field(rdf_predicate=DCTERMS.hasPart, default=None)
    isVersionOf: str = Field(rdf_predicate=DCTERMS.isVersionOf, default=None)
    isReplacedBy: str = Field(rdf_predicate=DCTERMS.isReplacedBy, default=None)
    conformsTo: str = Field(rdf_predicate=DCTERMS.conformsTo, default=None)
    hasFormat: str = Field(rdf_predicate=DCTERMS.hasFormat, default=None)
    isFormatOf: str = Field(rdf_predicate=DCTERMS.isFormatOf, default=None)
    isRequiredBy: str = Field(rdf_predicate=DCTERMS.isRequiredBy, default=None)
    requires: str = Field(rdf_predicate=DCTERMS.requires, default=None)
    isReferencedBy: str = Field(rdf_predicate=DCTERMS.isReferencedBy, default=None)
    references: str = Field(rdf_predicate=DCTERMS.references, default=None)
    replaces: str = Field(rdf_predicate=DCTERMS.replaces, default=None)
    source: str = Field(rdf_predicate=DCTERMS.source, default=None)

    _parse_relation = root_validator(pre=True)(parse_relation_rdf)

class AclStandardIPv4Entry(AclBaseEntry):
    src_address: Union[ipaddress.IPv4Address, ipaddress.IPv4Network, Literal['any']]
    src_wildcard: Optional[Union[constr(regex=r"(?:\d{1,3}\.){3}(?:\d{1,3})")]]

    @root_validator(allow_reuse=True)
    def wildcard_required(cls, values):
        src_wildcard = values.get('src_wildcard')
        if isinstance(values.get('src_address'), ipaddress.IPv4Network):
            if src_wildcard is not None:
                msg = "If 'src_address' is specified with a netmask, 'src_wildcard' must be None"
                raise AssertionError(msg)
        if isinstance(values.get('src_address'), ipaddress.IPv4Address):
            if src_wildcard is None:
                msg = "If 'src_address' is specified without a netmask, 'src_wildcard' must be set"
                raise AssertionError(msg)
        if values.get('src_address') == 'any':
            if src_wildcard is not None:
                msg = "If 'src_address' is 'any', 'src_wildcard' must be None"
                raise AssertionError(msg)
        return values