class BoldFileSchema(BaseFileSchema):
    """Schema describing a BOLD (functional MRI) image file."""

    datatype = fields.Str(default="func", validate=validate.Equal("func"))
    suffix = fields.Str(default="bold", validate=validate.Equal("bold"))
    extension = fields.Str(validate=validate.OneOf([".nii", ".nii.gz"]))
    tags = fields.Nested(BoldTagsSchema, default={})
    metadata = fields.Nested(BoldMetadataSchema)
class BoldFileSchema(BaseFileSchema):
    """Schema for a BOLD (functional) image file.

    On load, a ``dir`` entry found in ``tags`` is converted to a canonical
    ``phase_encoding_direction`` code and moved into ``metadata``.
    """

    datatype = fields.Str(dump_default="func", validate=validate.Equal("func"))
    suffix = fields.Str(dump_default="bold", validate=validate.Equal("bold"))
    extension = fields.Str(validate=validate.OneOf([".nii", ".nii.gz"]))
    tags = fields.Nested(BoldTagsSchema(), dump_default=dict())
    metadata = fields.Nested(BoldMetadataSchema(), dump_default=dict())

    @pre_load
    def move_dir_tag_to_metadata(self, in_data, **kwargs):
        """Fold the ``dir`` tag into ``metadata`` before validation."""
        path = Path(in_data["path"])
        if not path.is_file():  # this obj does not refer to a specific file
            return in_data

        # Make sure a metadata dict exists to receive the direction code.
        metadata = in_data.get("metadata")
        if not isinstance(metadata, dict):
            metadata = dict()
            in_data["metadata"] = metadata

        tags = in_data.get("tags")
        if isinstance(tags, dict):
            direction = tags.get("dir")
            if isinstance(direction, str):
                try:
                    pedir_code = parse_direction_str(direction)
                    # Canonicalization reads the image file itself, which is
                    # why the is_file() guard above is required.
                    metadata[
                        "phase_encoding_direction"] = canonicalize_direction_code(
                        pedir_code,
                        path,
                    )
                    # The tag has been promoted to metadata; drop the original.
                    del tags["dir"]
                except Exception:
                    # Best-effort: an unparsable direction leaves the data
                    # untouched rather than failing the whole load.
                    pass
        return in_data
def test_equal_repr():
    """repr() of Equal shows the comparable and the default or custom error."""
    default_error = "Must be equal to {other}."
    got = repr(validate.Equal(comparable=123, error=None))
    assert got == f"<Equal(comparable=123, error={default_error!r})>"

    got = repr(validate.Equal(comparable=123, error="foo"))
    assert got == f"<Equal(comparable=123, error={'foo'!r})>"
class TrainingSchema(BaseSchema):
    """Schema for a v1 ``Training`` document with polymorphic spec entries."""

    version = fields.String(required=True, validate=validate.Equal('v1'))
    kind = fields.String(required=True, validate=validate.Equal('Training'))
    # NOTE(review): "trainging" below looks like a typo, but it must match
    # the selector function's actual name — verify before renaming.
    spec = PolyField(
        serialization_schema_selector=training_property_schema_serialization_disambiguation,
        deserialization_schema_selector=trainging_property_schema_deserialization_disambiguation,
        many=True,
    )
def test_equal_repr():
    """Equal's repr embeds its comparable and its error template."""
    template = '<Equal(comparable=123, error={0!r})>'

    no_custom_error = repr(validate.Equal(comparable=123, error=None))
    assert no_custom_error == template.format('Must be equal to {other}.')

    custom_error = repr(validate.Equal(comparable=123, error='foo'))
    assert custom_error == template.format('foo')
class MixedEffectsModelSchema(BaseModelSchema):
    """Schema for a mixed-effects ("me") group model across subjects."""

    type = fields.Str(dump_default="me", validate=validate.Equal("me"))
    across = fields.Str(dump_default="sub", validate=validate.Equal("sub"))
    # The same algorithm set serves as both dump and load default.
    algorithms = fields.List(
        fields.Str(),
        dump_default=["flame1", "mcartest", "heterogeneity"],
        load_default=["flame1", "mcartest", "heterogeneity"],
    )
class MatrixLogUniformSchema(BaseSchema):
    """Matrix hyperparameter schema for log-uniform sampling."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("loguniform"))
    value = LogUniform(allow_none=True)

    @staticmethod
    def schema_config():
        return MatrixLogUniformConfig
class MatrixGeomSpaceSchema(BaseSchema):
    """Matrix hyperparameter schema for geometric-space sampling."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("geomspace"))
    value = GeomSpace(allow_none=True)

    @staticmethod
    def schema_config():
        return MatrixGeomSpaceConfig
class MatrixRangeSchema(BaseSchema):
    """Matrix hyperparameter schema for a numeric range."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("range"))
    value = Range(allow_none=True)

    @staticmethod
    def schema_config():
        return MatrixRangeConfig
class MatrixChoiceSchema(BaseSchema):
    """Matrix hyperparameter schema for an explicit list of choices."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("choice"))
    # Raw items: a choice may be any JSON-serializable value.
    value = fields.List(fields.Raw(), allow_none=True)

    @staticmethod
    def schema_config():
        return MatrixChoiceConfig
class DagRefSchema(BaseCamelSchema):
    """Schema referencing a DAG component by name."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("dag_ref"))
    name = fields.Str(required=True)

    @staticmethod
    def schema_config():
        return V1DagRef
class SparkSchema(BaseCamelSchema):
    """Schema for a Spark run specification.

    Mirrors the spark-operator application spec: driver/executor replicas,
    Hadoop/Spark configuration, and application entry points.
    """

    kind = fields.Str(allow_none=True, validate=validate.Equal(V1RunKind.SPARK))
    connections = fields.List(fields.Str(), allow_none=True)
    volumes = fields.List(SwaggerField(cls=k8s_schemas.V1Volume), allow_none=True)
    type = fields.Str(allow_none=True, validate=validate.OneOf(V1SparkType.allowable_values))
    spark_version = fields.Str(allow_none=True)
    python_version = fields.Str(allow_none=True)
    # BUG FIX: the V1SparkDeploy validator was attached to python_version;
    # its allowable values are deploy modes, so it belongs on deploy_mode.
    deploy_mode = fields.Str(allow_none=True, validate=validate.OneOf(
        V1SparkDeploy.allowable_values))
    main_class = fields.Str(allow_none=True)
    main_application_file = fields.Str(allow_none=True)
    arguments = fields.List(fields.Str(), allow_none=True)
    hadoop_conf = fields.Dict(keys=fields.Str(), values=fields.Str(), allow_none=True)
    spark_conf = fields.Dict(keys=fields.Str(), values=fields.Str(), allow_none=True)
    hadoop_config_map = fields.Str(allow_none=True)
    spark_config_map = fields.Str(allow_none=True)
    executor = fields.Nested(SparkReplicaSchema, allow_none=True)
    driver = fields.Nested(SparkReplicaSchema, allow_none=True)

    @staticmethod
    def schema_config():
        return V1Spark
class MedianStoppingPolicySchema(BaseSchema):
    """Schema for the median early-stopping policy."""

    kind = fields.Str(allow_none=True, validate=validate.Equal('median'))
    evaluation_interval = fields.Int()

    @staticmethod
    def schema_config():
        return MedianStoppingPolicyConfig
class SparkSchema(BaseSchema):
    """Schema wrapping a raw Spark specification."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("spark"))
    # The spec is passed through untyped; it may also be a reference.
    spec = RefOrObject(fields.Raw(required=True))

    @staticmethod
    def schema_config():
        return SparkConfig
class MappingSchema(BaseSchema):
    """Schema for a mapping parallelism strategy (list of value dicts)."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("mapping"))
    values = RefOrObject(fields.List(fields.Dict(), allow_none=True))

    @staticmethod
    def schema_config():
        return MappingConfig
class NotifierSchema(JobSchema):
    """Job schema specialized for notifier runs."""

    kind = fields.Str(allow_none=True, validate=validate.Equal(V1RunKind.NOTIFIER))

    @staticmethod
    def schema_config():
        return V1Notifier
class ExactTimeScheduleSchema(BaseCamelSchema):
    """Schema for a one-shot schedule firing at an exact time."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("exact_time"))
    start_at = fields.DateTime(required=True)

    @staticmethod
    def schema_config():
        return V1ExactTimeSchedule
class WatchDogSchema(JobSchema):
    """Job schema specialized for watchdog runs."""

    kind = fields.Str(allow_none=True, validate=validate.Equal(V1RunKind.WATCHDOG))

    @staticmethod
    def schema_config():
        return V1WatchDog
class GroupSchema(ExperimentSchema):
    """Experiment schema extended with hyperparameter-tuning settings."""

    kind = fields.Str(allow_none=True, validate=validate.Equal('group'))
    hptuning = fields.Nested(HPTuningSchema, allow_none=True)

    @staticmethod
    def schema_config():
        return GroupConfig
class MatrixQLogNormalSchema(BaseSchema):
    """Matrix hyperparameter schema for quantized log-normal sampling."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("qlognormal"))
    value = QLogNormal(allow_none=True)

    @staticmethod
    def schema_config():
        return MatrixQLogNormalConfig
class RaySchema(BaseCamelSchema):
    """Schema wrapping a raw Ray run specification."""

    kind = fields.Str(allow_none=True, validate=validate.Equal(V1RunKind.RAY))
    spec = RefOrObject(fields.Raw(required=True))

    @staticmethod
    def schema_config():
        return V1Ray
class ExactTimeScheduleSchema(BaseSchema):
    """Schema for a one-shot schedule firing at an exact local time."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("exact_time"))
    start_at = RefOrObject(fields.LocalDateTime(required=True), required=True)

    @staticmethod
    def schema_config():
        return ExactTimeScheduleConfig
class ExperimentSchema(BaseRunSchema):
    """Schema for an experiment run specification."""

    # BUG FIX: was allow_none=None; the parameter expects a bool, and every
    # sibling schema in this codebase passes allow_none=True.
    kind = fields.Str(allow_none=True, validate=validate.Equal('experiment'))
    declarations = fields.Raw(allow_none=True)
    environment = fields.Nested(ExperimentEnvironmentSchema, allow_none=True)
    backend = fields.Str(allow_none=True, validate=validate.OneOf(ExperimentBackend.VALUES))
    framework = fields.Str(allow_none=True)
    run = fields.Nested(RunSchema, allow_none=True)
    model = fields.Nested(ModelSchema, allow_none=True)
    train = fields.Nested(TrainSchema, allow_none=True)
    eval = fields.Nested(EvalSchema, allow_none=True)

    @staticmethod
    def schema_config():
        return ExperimentConfig

    @validates_schema
    def validate_backend(self, data):
        """Validate backend"""
        # Resolves to the module-level validate_backend helper — class scope
        # is not visible inside method bodies, so this is not recursion.
        validate_backend(data.get('backend'))

    @validates_schema
    def validate_replicas(self, data):
        """Validate distributed experiment"""
        environment = data.get('environment')
        if environment and environment.replicas:
            validate_replicas(data.get('framework'), environment.replicas)
class CleanerSchema(JobSchema):
    """Job schema specialized for cleaner runs."""

    kind = fields.Str(allow_none=True, validate=validate.Equal(V1RunKind.CLEANER))

    @staticmethod
    def schema_config():
        return V1Cleaner
class HpLinSpaceSchema(BaseCamelSchema):
    """Hyperparameter schema for linearly spaced values."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("linspace"))
    value = LinSpace(allow_none=True)

    @staticmethod
    def schema_config():
        return V1HpLinSpace
class RegistryPackageManifest:
    """Base class for registry packages."""

    # NOTE(review): no @dataclass decorator is visible in this chunk, yet the
    # class uses dataclasses.field and class_schema() — presumably it is
    # decorated (e.g. via marshmallow_dataclass); confirm in the full file.

    # Required (no defaults): dependency constraints and package version.
    conditions: Dict[str, ConditionSemVer]
    version: SemVer
    categories: List[str] = field(default_factory=lambda: ['security'])
    description: str = 'Rules for the detection engine in the Security application.'
    # Pinned manifest format; validation rejects anything but '1.0.0'.
    format_version: SemVer = field(
        metadata=dict(validate=validate.Equal('1.0.0')), default='1.0.0')
    icons: list = field(default_factory=list)
    internal: bool = True
    license: str = 'basic'
    name: str = 'detection_rules'
    owner: Dict[str, str] = field(
        default_factory=lambda: dict(github='elastic/protections'))
    policy_templates: list = field(default_factory=list)
    release: str = 'experimental'
    screenshots: list = field(default_factory=list)
    title: str = 'Detection rules'
    type: str = 'integration'

    @classmethod
    def get_schema(cls) -> Type[Schema]:
        # Derive a marshmallow schema class from this dataclass definition.
        return class_schema(cls)

    @classmethod
    def from_dict(cls, obj: dict) -> 'RegistryPackageManifest':
        # Validating constructor: load() raises on schema violations.
        return cls.get_schema()().load(obj)

    def asdict(self) -> dict:
        # Serialize back to a plain dict via the derived schema.
        return self.get_schema()().dump(self)
class UrlRefSchema(BaseCamelSchema):
    """Schema referencing a component by URL."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("url_ref"))
    url = fields.Str(required=True)

    @staticmethod
    def schema_config():
        return V1UrlRef
class HpQLogUniformSchema(BaseCamelSchema):
    """Hyperparameter schema for quantized log-uniform sampling."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("qloguniform"))
    value = QLogUniform(allow_none=True)

    @staticmethod
    def schema_config():
        return V1HpQLogUniform
def validate(self) -> None: exceptions: List[ValidationError] = [] # verify that the metadata file is present and valid try: metadata = load_metadata(self.contents) except ValidationError as exc: exceptions.append(exc) metadata = None for file_name, content in self.contents.items(): prefix = file_name.split("/")[0] schema = schemas.get(f"{prefix}/") if schema: try: config = load_yaml(file_name, content) schema.load(config) self._configs[file_name] = config except ValidationError as exc: exc.messages = {file_name: exc.messages} exceptions.append(exc) # validate that the type declared in METADATA_FILE_NAME is correct if metadata: type_validator = validate.Equal(SqlaTable.__name__) try: type_validator(metadata["type"]) except ValidationError as exc: exc.messages = {METADATA_FILE_NAME: {"type": exc.messages}} exceptions.append(exc) if exceptions: exception = CommandInvalidError("Error importing dataset") exception.add_list(exceptions) raise exception
class HpLogNormalSchema(BaseCamelSchema):
    """Hyperparameter schema for log-normal sampling."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("lognormal"))
    value = LogNormal(allow_none=True)

    @staticmethod
    def schema_config():
        return V1HpLogNormal