Example #1
class Badge(_BioImageIOSchema):
    bioimageio_description = "Custom badge."
    label = fields.String(required=True,
                          bioimageio_description="e.g. 'Open in Colab'")
    icon = fields.String(
        bioimageio_description=
        "e.g. 'https://colab.research.google.com/assets/colab-badge.svg'")
    url = fields.Union(
        [fields.URL(), fields.RelativeLocalPath()],
        bioimageio_description=
        "e.g. 'https://colab.research.google.com/github/HenriquesLab/ZeroCostDL4Mic/blob/master/Colab_notebooks/U-net_2D_ZeroCostDL4Mic.ipynb'",
    )
Example #2
class CiteEntry(_BioImageIOSchema):
    text = fields.String(required=True,
                         bioimageio_description="free text description")
    doi = fields.DOI(bioimageio_maybe_required=True,
                     bioimageio_description=
                     "digital object identifier, see https://www.doi.org/")
    url = fields.String(
        bioimageio_maybe_required=True)  # todo: change to fields.URL

    @validates_schema
    def doi_or_url(self, data, **kwargs):
        if data.get("doi") is None and data.get("url") is None:
            raise ValidationError(
                "doi or url needs to be specified in a citation")
Example #3
class _WeightsEntryBase(_BioImageIOSchema):
    authors = fields.List(
        fields.Nested(Author()),
        bioimageio_description="A list of authors. If this is the root weight (it does not have a `parent` field): the "
        "person(s) that have trained this model. If this is a child weight (it has a `parent` field): the person(s) "
        "who have converted the weights to this format.",
    )  # todo: copy root authors if missing
    attachments = fields.Dict(
        fields.String(),
        fields.List(fields.Union([fields.URI(), fields.Raw()])),
        bioimageio_description="Dictionary of text keys and list values (that may contain any valid yaml) to "
        "additional, relevant files that are specific to the current weight format. A list of URIs can be listed under"
        " the `files` key to include additional files for generating the model package.",
    )
    parent = fields.String(
        bioimageio_description="The source weights used as input for converting the weights to this format. For "
        "example, if the weights were converted from the format `pytorch_state_dict` to `pytorch_script`, the parent "
        "is `pytorch_state_dict`. All weight entries except one (the initial set of weights resulting from training "
        "the model), need to have this field."
    )
    sha256 = fields.String(
        validate=field_validators.Length(equal=64),
        bioimageio_description="SHA256 checksum of the source file specified. " + _common_sha256_hint,
    )
    source = fields.Union(
        [fields.URI(), fields.RelativeLocalPath()],
        required=True,
        bioimageio_description="URI or path to the weights file. Preferably a url.",
    )
    weights_format = fields.String(
        validate=field_validators.OneOf(get_args(raw_nodes.WeightsFormat)), required=True, load_only=True
    )

    @post_load
    def make_object(self, data, **kwargs):
        data.pop("weights_format")  # weights_format was only used to identify correct WeightsEntry schema
        return super().make_object(data, **kwargs)

    @pre_dump
    def raise_on_weights_format_mismatch(self, raw_node, **kwargs):
        """
        ensures to serialize a raw_nodes.<Special>WeightsEntry with the corresponding schema.<Special>WeightsEntry

        This check is required, because no validation is performed by marshmallow on serialization,
        which disables the Union field to select the appropriate nested schema for serialization.
        """
        if self.__class__.__name__ != raw_node.__class__.__name__:
            raise TypeError(f"Cannot serialize {raw_node} with {self}")

        return raw_node
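
The `@pre_dump` guard above compensates for the fact that marshmallow performs no validation on serialization, so a Union field cannot pick the matching nested schema when dumping. A small sketch of the hook pattern in plain marshmallow, using a made-up `Point`/`PointSchema` pair that is not part of the spec:

from marshmallow import Schema, fields, pre_dump


class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y


class PointSchema(Schema):
    x = fields.Float()
    y = fields.Float()

    @pre_dump
    def check_type(self, obj, **kwargs):
        # dump() does not validate, so reject objects of the wrong type explicitly
        if not isinstance(obj, Point):
            raise TypeError(f"Cannot serialize {obj!r} with {self!r}")
        return obj


print(PointSchema().dump(Point(1.0, 2.0)))  # {'x': 1.0, 'y': 2.0}
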
Example #4
class _TensorBase(_BioImageIOSchema):
    name = fields.String(
        required=True,
        validate=field_validators.Predicate("isidentifier"),
        bioimageio_description="Tensor name. No duplicates are allowed.",
    )
    description = fields.String()
    axes = fields.Axes(
        required=True,
        bioimageio_description=
        """Axes identifying characters from: bitczyx. Same length and order as the axes in `shape`.

| character | description |
| --- | --- |
|  b  |  batch (groups multiple samples) |
|  i  |  instance/index/element |
|  t  |  time |
|  c  |  channel |
|  z  |  spatial dimension z |
|  y  |  spatial dimension y |
|  x  |  spatial dimension x |""",
    )
    data_type = fields.String(
        required=True,
        bioimageio_description=
        "The data type of this tensor. For inputs, only `float32` is allowed and the consumer "
        "software needs to ensure that the correct data type is passed here. For outputs it can be any of `float32, "
        "float64, (u)int8, (u)int16, (u)int32, (u)int64`. The data flow in bioimage.io models is explained "
        "[in this diagram](https://docs.google.com/drawings/d/1FTw8-Rn6a6nXdkZ_SkMumtcjvur9mtIhRqLwnKqZNHM/edit).",
    )
    data_range = fields.Tuple(
        (fields.Float(allow_nan=True), fields.Float(allow_nan=True)),
        bioimageio_description=
        "Tuple `(minimum, maximum)` specifying the allowed range of the data in this tensor. "
        "If not specified, the full data range that can be expressed in `data_type` is allowed.",
    )
    shape: fields.Union

    processing_name: str

    @validates_schema
    def validate_processing_kwargs(self, data, **kwargs):
        axes = data.get("axes", [])
        processing_list = data.get(self.processing_name, [])
        for processing in processing_list:
            kwargs = processing.kwargs or {}
            kwarg_axes = kwargs.get("axes", "")
            if any(a not in axes for a in kwarg_axes):
                raise ValidationError(
                    "`kwargs.axes` needs to be subset of axes")
Example #5
class Postprocessing(Processing):
    name = fields.String(
        validate=field_validators.OneOf(get_args(raw_nodes.PostprocessingName)),
        required=True,
        bioimageio_description=f"Name of postprocessing. One of: {', '.join(get_args(raw_nodes.PostprocessingName))}.",
    )
    kwargs = fields.Kwargs(
        bioimageio_description=f"Keyword arguments as described in [postprocessing spec]"
        f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/postprocessing_spec_"
        f"{'_'.join(get_args(raw_nodes.FormatVersion)[-1].split('.')[:2])}.md)."
    )

    class scale_range(Preprocessing.scale_range):
        reference_tensor = fields.String(
            required=False,
            validate=field_validators.Predicate("isidentifier"),
            bioimageio_description="Tensor name to compute the percentiles from. Default: The tensor itself. "
            "If mode==per_dataset this needs to be the name of an input tensor.",
        )

    class scale_mean_variance(SharedProcessingSchema):
        bioimageio_description = "Scale the tensor s.t. its mean and variance match a reference tensor."
        mode = fields.ProcMode(required=True, valid_modes=("per_dataset", "per_sample"))
        reference_tensor = fields.String(
            required=True,
            validate=field_validators.Predicate("isidentifier"),
            bioimageio_description="Name of tensor to match.",
        )
Example #6
 class scale_range(Preprocessing.scale_range):
     reference_tensor = fields.String(
         required=False,
         validate=field_validators.Predicate("isidentifier"),
         bioimageio_description="Tensor name to compute the percentiles from. Default: The tensor itself. "
         "If mode==per_dataset this needs to be the name of an input tensor.",
     )
Example #7
 def test_error_messages(self):
     union = fields.Union([fields.String(), fields.Number()])
     try:
         union.deserialize([1])
     except ValidationError as e:
         assert isinstance(e, ValidationError)
         assert len(e.messages) == 3, e.messages
Example #8
 class scale_mean_variance(SharedProcessingSchema):
     bioimageio_description = "Scale the tensor s.t. its mean and variance match a reference tensor."
     mode = fields.ProcMode(required=True, valid_modes=("per_dataset", "per_sample"))
     reference_tensor = fields.String(
         required=True,
         validate=field_validators.Predicate("isidentifier"),
         bioimageio_description="Name of tensor to match.",
     )
Example #9
class TorchscriptWeightsEntry(_WeightsEntryBase):
    raw_nodes = raw_nodes

    bioimageio_description = "Torchscript weights format"
    weights_format = fields.String(
        validate=field_validators.Equal("torchscript"),
        required=True,
        load_only=True)
    pytorch_version = fields.Version()
Example #10
class _Person(_BioImageIOSchema):
    name = fields.String(bioimageio_description="Full name.")
    affiliation = fields.String(bioimageio_description="Affiliation.")
    email = fields.Email()
    github_user = fields.String(
        bioimageio_description="GitHub user name.")  # todo: add validation?
    orcid = fields.String(
        validate=[
            field_validators.Length(19),
            lambda oid: all(oid[idx] == "-" for idx in [4, 9, 14]),
            lambda oid: is_valid_orcid_id(oid.replace("-", "")),
        ],
        bioimageio_description=
        "[orcid](https://support.orcid.org/hc/en-us/sections/360001495313-What-is-ORCID) id "
        "in hyphenated groups of 4 digits, e.g. '0000-0001-2345-6789' (and [valid]("
        "https://support.orcid.org/hc/en-us/articles/360006897674-Structure-of-the-ORCID-Identifier"
        ") as per ISO 7064 11,2.)",
    )
Example #11
class RunMode(_BioImageIOSchema):
    name = fields.String(required=True,
                         bioimageio_description="The name of the `run_mode`")
    kwargs = fields.Kwargs()

    @validates("name")
    def warn_on_unrecognized_run_mode(self, value: str):
        if isinstance(value, str):
            self.warn("name", f"Unrecognized run mode '{value}'")
Example #12
class PytorchStateDictWeightsEntry(_WeightsEntryBase):
    bioimageio_description = "PyTorch state dictionary weights format"
    weights_format = fields.String(
        validate=field_validators.Equal("pytorch_state_dict"),
        required=True,
        load_only=True)
    architecture = fields.ImportableSource(
        required=True,
        bioimageio_description=
        "Source code of the model architecture that either points to a "
        "local implementation: `<relative path to file>:<identifier of implementation within the file>` or the "
        "implementation in an available dependency: `<root-dependency>.<sub-dependency>.<identifier>`.\nFor example: "
        "`my_function.py:MyImplementation` or `bioimageio.core.some_module.some_class_or_function`.",
    )
    architecture_sha256 = fields.String(
        bioimageio_maybe_required=True,
        validate=field_validators.Length(equal=64),
        bioimageio_description=
        "This field is only required if the architecture points to a source file. "
        "SHA256 checksum of the model source code file." +
        _common_sha256_hint.replace(
            "    ", "        "),  # sha256 hint with one more indent level
    )
    kwargs = fields.Kwargs(
        bioimageio_description=
        "Keyword arguments for the implementation specified by `architecture`."
    )
    pytorch_version = fields.Version()

    @validates_schema
    def sha_for_source_code_file(self, data, **kwargs):
        arch = data.get("architecture")
        if isinstance(arch, raw_nodes.ImportableModule):
            return
        elif isinstance(arch, raw_nodes.ImportableSourceFile):
            sha = data.get("architecture_sha256")
            if sha is None:
                raise ValidationError(
                    "When specifying 'architecture' with a callable from a source file, "
                    "the corresponding 'architecture_sha256' field is required."
                )
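
The `sha_for_source_code_file` check above makes `architecture_sha256` conditionally required. A minimal sketch of the same cross-field pattern in plain marshmallow; `ArchitectureSketch` is hypothetical, plain string fields stand in for the spec's custom field types, and a simple ':' heuristic stands in for the ImportableSourceFile check:

from marshmallow import Schema, ValidationError, fields, validates_schema


class ArchitectureSketch(Schema):
    # hypothetical stand-in: a local source file reference requires a checksum
    architecture = fields.String(required=True)
    architecture_sha256 = fields.String()

    @validates_schema
    def sha_for_source_code_file(self, data, **kwargs):
        points_to_file = ":" in data["architecture"]  # e.g. "my_function.py:MyImplementation"
        if points_to_file and data.get("architecture_sha256") is None:
            raise ValidationError(
                "When specifying 'architecture' with a callable from a source file, "
                "the corresponding 'architecture_sha256' field is required."
            )


ArchitectureSketch().load({"architecture": "bioimageio.core.some_module.some_class_or_function"})  # passes
try:
    ArchitectureSketch().load({"architecture": "my_function.py:MyImplementation"})
except ValidationError as err:
    print(err.messages)  # {'_schema': ["When specifying 'architecture' ..."]}
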
Example #13
class Preprocessing(Processing):
    name = fields.String(
        required=True,
        validate=field_validators.OneOf(get_args(raw_nodes.PreprocessingName)),
        bioimageio_description=f"Name of preprocessing. One of: {', '.join(get_args(raw_nodes.PreprocessingName))}.",
    )
    kwargs = fields.Kwargs(
        bioimageio_description=f"Keyword arguments as described in [preprocessing spec]"
        f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/preprocessing_spec_"
        f"{'_'.join(get_args(raw_nodes.FormatVersion)[-1].split('.')[:2])}.md)."
    )

    class scale_range(SharedProcessingSchema):
        bioimageio_description = "Scale with percentiles."
        mode = fields.ProcMode(required=True, valid_modes=("per_dataset", "per_sample"))
        axes = fields.Axes(
            required=True,
            valid_axes="czyx",
            bioimageio_description="The subset of axes to normalize jointly. For example xy to normalize the two image "
            "axes for 2d data jointly. The batch axis (b) is not valid here.",
        )
        min_percentile = fields.Float(
            default=0,
            validate=field_validators.Range(0, 100, min_inclusive=True, max_inclusive=False),
            bioimageio_description="The lower percentile used for normalization, in range 0 to 100. Default value: 0.",
        )
        max_percentile = fields.Float(
            default=100,
            validate=field_validators.Range(1, 100, min_inclusive=False, max_inclusive=True),
            bioimageio_description="The upper percentile used for normalization, in range 1 to 100. Has to be bigger "
            "than min_percentile. Default value: 100. The range is 1 to 100 instead of 0 to 100 to avoid mistakenly "
            "accepting percentiles specified in the range 0.0 to 1.0.",
        )
        eps = fields.Float(
            missing=1e-6,
            bioimageio_description="Epsilon for numeric stability: "
            "`out = (tensor - v_lower) / (v_upper - v_lower + eps)`; "
            "with `v_lower,v_upper` values at the respective percentiles. Default value: 10^-6.",
        )

        @validates_schema
        def min_smaller_max(self, data, **kwargs):
            min_p = data.get("min_percentile", 0)
            max_p = data.get("max_percentile", 100)
            if min_p >= max_p:
                raise ValidationError(f"min_percentile {min_p} >= max_percentile {max_p}")
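
Note the two kinds of defaults above: `missing=` (as on `eps`) fills a value at load time, while `default=` (as on `min_percentile`) is only used when dumping, which is presumably why the validator falls back to `data.get("min_percentile", 0)`. A small sketch in plain marshmallow with a hypothetical schema name (newer marshmallow versions call these parameters `load_default`/`dump_default`):

from marshmallow import Schema, fields


class ScaleRangeSketch(Schema):
    # `missing` fills the value on load; `default` would only apply when dumping
    min_percentile = fields.Float(missing=0.0)
    max_percentile = fields.Float(missing=100.0)
    eps = fields.Float(missing=1e-6)


print(ScaleRangeSketch().load({}))  # all three values filled from the load-time defaults
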
Example #14
class ImplicitOutputShape(SharedBioImageIOSchema):
    reference_tensor = fields.String(required=True, bioimageio_description="Name of the reference tensor.")
    scale = fields.List(
        fields.Float(), required=True, bioimageio_description="'output_pix/input_pix' for each dimension."
    )
    offset = fields.List(
        fields.Float(), required=True, bioimageio_description="Position of origin wrt to input. Multiple of 0.5."
    )

    @validates_schema
    def matching_lengths(self, data, **kwargs):
        scale = data["scale"]
        offset = data["offset"]
        if len(scale) != len(offset):
            raise ValidationError(f"scale {scale} has to have same length as offset {offset}!")

    @validates("offset")
    def double_offset_is_int(self, value: List[float]):
        for v in value:
            if 2 * v != int(2 * v):
                raise ValidationError(f"offset {v} in {value} not a multiple of 0.5!")
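
`ImplicitOutputShape` combines a schema-level validator (equal lengths of `scale` and `offset`) with a field-level validator (each `offset` entry is a multiple of 0.5). A stripped-down sketch of the two validator kinds in plain marshmallow; the schema name is hypothetical and the bioimageio description metadata is omitted:

from typing import List

from marshmallow import Schema, ValidationError, fields, validates, validates_schema


class ImplicitOutputShapeSketch(Schema):
    scale = fields.List(fields.Float(), required=True)
    offset = fields.List(fields.Float(), required=True)

    @validates_schema
    def matching_lengths(self, data, **kwargs):
        # schema-level validator: sees all fields at once
        if len(data["scale"]) != len(data["offset"]):
            raise ValidationError("scale and offset must have the same length")

    @validates("offset")
    def offset_is_multiple_of_half(self, value: List[float]):
        # field-level validator: sees only the offset list
        for v in value:
            if 2 * v != int(2 * v):
                raise ValidationError(f"offset {v} is not a multiple of 0.5")


ImplicitOutputShapeSketch().load({"scale": [1.0, 1.0], "offset": [0.5, 0.0]})  # passes
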
Example #15
class Model(rdf.schema.RDF):
    raw_nodes = raw_nodes

    class Meta:
        unknown = RAISE

    bioimageio_description = f"""# BioImage.IO Model Resource Description File Specification {get_args(raw_nodes.FormatVersion)[-1]}
This specification defines the fields used in a BioImage.IO-compliant resource description file (`RDF`) for describing AI models with pretrained weights.
These fields are typically stored in YAML files which we call Model Resource Description Files or `model RDF`.
The model RDFs can be downloaded or uploaded to the bioimage.io website, produced or consumed by BioImage.IO-compatible consumers (e.g. image analysis software or other websites).

The model RDF YAML file contains mandatory and optional fields. In the following description, optional fields are indicated by _optional_.
_optional*_ with an asterisk indicates the field is optional depending on the value in another field.
"""
    # todo: unify authors with RDF (optional or required?)
    authors = fields.List(
        fields.Nested(Author()), required=True, bioimageio_description=rdf.schema.RDF.authors_bioimageio_description
    )

    badges = missing_
    cite = fields.List(
        fields.Nested(CiteEntry()),
        required=True,  # todo: unify authors with RDF (optional or required?)
        bioimageio_description=rdf.schema.RDF.cite_bioimageio_description,
    )

    documentation = fields.Union(
        [
            fields.URL(),
            fields.RelativeLocalPath(
                validate=field_validators.Attribute(
                    "suffix",
                    field_validators.Equal(
                        ".md", error="{!r} is invalid; expected markdown file with '.md' extension."
                    ),
                )
            ),
        ],
        required=True,
        bioimageio_description="Relative path to file with additional documentation in markdown. This means: 1) only "
        "relative file path is allowed 2) the file must be in markdown format with `.md` file name extension 3) URL is "
        "not allowed. It is recommended to use `README.md` as the documentation name.",
    )

    download_url = missing_

    dependencies = fields.Dependencies(  # todo: add validation (0.4.0?)
        bioimageio_description="Dependency manager and dependency file, specified as `<dependency manager>:<relative "
        "path to file>`. For example: 'conda:./environment.yaml', 'maven:./pom.xml', or 'pip:./requirements.txt'"
    )

    format_version = fields.String(
        validate=field_validators.OneOf(get_args_flat(raw_nodes.FormatVersion)),
        required=True,
        bioimageio_description_order=0,
        bioimageio_description=f"""Version of the BioImage.IO Model Resource Description File Specification used.
This is mandatory, and important for the consumer software to verify before parsing the fields.
The recommended behavior for the implementation is to keep backward compatibility and throw an error if the model yaml
is in an unsupported format version. The current format version described here is
{get_args(raw_nodes.FormatVersion)[-1]}""",
    )

    framework = fields.String(
        validate=field_validators.OneOf(get_args(raw_nodes.Framework)),
        bioimageio_description=f"The deep learning framework of the source code. One of: "
        f"{', '.join(get_args(raw_nodes.Framework))}. This field is only required if the field `source` is present.",
    )

    git_repo = fields.String(
        validate=field_validators.URL(schemes=["http", "https"]),
        bioimageio_description=rdf.schema.RDF.git_repo_bioimageio_description
        + "If the model is contained in a subfolder of a git repository, then a url to the exact folder"
        + " If the model is contained in a subfolder of a git repository, then a url to the exact folder "
    )

    icon = missing_

    kwargs = fields.Kwargs(
        bioimageio_description="Keyword arguments for the implementation specified by `source`. "
        "This field is only required if the field `source` is present."
    )

    language = fields.String(
        validate=field_validators.OneOf(get_args(raw_nodes.Language)),
        bioimageio_maybe_required=True,
        bioimageio_description=f"Programming language of the source code. One of: "
        f"{', '.join(get_args(raw_nodes.Language))}. This field is only required if the field `source` is present.",
    )

    license = fields.String(
        required=True,  # todo: unify license with RDF (optional or required?)
        bioimageio_description=rdf.schema.RDF.license_bioimageio_description,
    )

    name = fields.String(
        # validate=field_validators.Length(max=36),  # todo: enforce in future version (0.4.0?)
        required=True,
        bioimageio_description="Name of this model. It should be human-readable and only contain letters, numbers, "
        "underscore '_',  minus '-' or spaces and not be longer than 36 characters.",
    )

    packaged_by = fields.List(
        fields.Nested(Author()),
        bioimageio_description=f"The persons that have packaged and uploaded this model. Only needs to be specified if "
        f"different from `authors` in root or any entry in `weights`.",
    )

    parent = fields.Nested(
        ModelParent(),
        bioimageio_description="Parent model from which the trained weights of this model have been derived, e.g. by "
        "finetuning the weights of this model on a different dataset. For format changes of the same trained model "
        "checkpoint, see `weights`.",
    )

    run_mode = fields.Nested(
        RunMode(),
        bioimageio_description="Custom run mode for this model: for more complex prediction procedures like test time "
        "data augmentation that currently cannot be expressed in the specification. "
        "No standard run modes are defined yet.",
    )

    sha256 = fields.String(
        validate=field_validators.Length(equal=64),
        bioimageio_description="SHA256 checksum of the model source code file."
        + _common_sha256_hint
        + " This field is only required if the field source is present.",
    )

    source = fields.ImportableSource(
        bioimageio_maybe_required=True,
        bioimageio_description="Language and framework specific implementation. As some weights contain the model "
        "architecture, the source is optional depending on the present weight formats. `source` can either point to a "
        "local implementation: `<relative path to file>:<identifier of implementation within the source file>` or the "
        "implementation in an available dependency: `<root-dependency>.<sub-dependency>.<identifier>`.\nFor example: "
        "`my_function.py:MyImplementation` or `core_library.some_module.some_function`.",
    )

    timestamp = fields.DateTime(
        required=True,
        bioimageio_description="Timestamp of the initial creation of this model in [ISO 8601]"
        "(https://en.wikipedia.org/wiki/ISO_8601) format.",
    )

    weights = fields.Dict(
        fields.String(
            validate=field_validators.OneOf(get_args(raw_nodes.WeightsFormat)),
            required=True,
            bioimageio_description=f"Format of this set of weights. Weight formats can define additional (optional or "
            f"required) fields. See [weight_formats_spec_0_3.md]"
            f"(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/weight_formats_spec_0_3.md). "
            f"One of: {', '.join(get_args(raw_nodes.WeightsFormat))}",
        ),
        fields.Union([fields.Nested(we()) for we in get_args(WeightsEntry)]),
        required=True,
        bioimageio_description="The weights for this model. Weights can be given for different formats, but should "
        "otherwise be equivalent. The available weight formats determine which consumers can use this model.",
    )

    @pre_load
    def add_weights_format_key_to_weights_entry_value(self, data: dict, many=False, partial=False, **kwargs):
        data = deepcopy(data)  # Schema.validate() calls pre_load methods, thus we should not modify the input data
        if many or partial:
            raise NotImplementedError

        for weights_format, weights_entry in data.get("weights", {}).items():
            if "weights_format" in weights_entry:
                raise ValidationError(f"Got unexpected key 'weights_format' in weights entry {weights_format}")

            weights_entry["weights_format"] = weights_format

        return data

    inputs = fields.List(
        fields.Nested(InputTensor()), bioimageio_description="Describes the input tensors expected by this model."
    )
    outputs = fields.List(
        fields.Nested(OutputTensor()), bioimageio_description="Describes the output tensors from this model."
    )

    test_inputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        required=True,
        bioimageio_description="List of URIs or local relative paths to test inputs as described in inputs for "
        "**a single test case**. "
        "This means if your model has more than one input, you should provide one URI for each input. "
        "Each test input should be a file with an ndarray in "
        "[numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format). "
        "The extension must be '.npy'.",
    )
    test_outputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        required=True,
        bioimageio_description="Analogous to test_inputs.",
    )

    sample_inputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        bioimageio_description="List of URIs/local relative paths to sample inputs to illustrate possible inputs for "
        "the model, for example stored as png or tif images. "
        "These sample files are not used to test the model; they only illustrate an example use case for a human user.",
    )
    sample_outputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        bioimageio_description="List of URIs/local relative paths to sample outputs corresponding to the "
        "`sample_inputs`.",
    )

    config = fields.YamlDict(
        bioimageio_description=rdf.schema.RDF.config_bioimageio_description
        + """

    For example:
    ```yaml
    config:
      # custom config for DeepImageJ, see https://github.com/bioimage-io/configuration/issues/23
      deepimagej:
        model_keys:
          # In principle the tag "SERVING" is used in almost every tf model
          model_tag: tf.saved_model.tag_constants.SERVING
          # Signature definition to call the model. Again "SERVING" is the most general
          signature_definition: tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
        test_information:
          input_size: [2048x2048] # Size of the input images
          output_size: [1264x1264] # Size of all the outputs
          device: cpu # Device used. In principle either cpu or GPU
          memory_peak: 257.7 Mb # Maximum memory consumed by the model in the device
          runtime: 78.8s # Time it took to run the model
          pixel_size: [9.658E-4µmx9.658E-4µm] # Size of the pixels of the input
    ```
"""
    )

    @validates_schema
    def language_and_framework_match(self, data, **kwargs):
        field_names = ("language", "framework")
        valid_combinations = [
            ("python", "scikit-learn"),  # todo: remove
            ("python", "pytorch"),
            ("python", "tensorflow"),
            ("java", "tensorflow"),
        ]
        if "source" not in data:
            valid_combinations.append((missing_, missing_))
            valid_combinations.append(("python", missing_))
            valid_combinations.append(("java", missing_))

        combination = tuple(data.get(name, missing_) for name in field_names)
        if combination not in valid_combinations:
            raise ValidationError(f"invalid combination of {dict(zip(field_names, combination))}")

    @validates_schema
    def source_specified_if_required(self, data, **kwargs):
        if "source" in data:
            return

        weights_format_requires_source = {
            "pytorch_state_dict": True,
            "pytorch_script": False,
            "keras_hdf5": False,
            "tensorflow_js": False,
            "tensorflow_saved_model_bundle": False,
            "onnx": False,
        }
        require_source = {wf for wf in data["weights"] if weights_format_requires_source[wf]}
        if require_source:
            raise ValidationError(
                f"These specified weight formats require source code to be specified: {require_source}"
            )

    @validates_schema
    def validate_reference_tensor_names(self, data, **kwargs):
        valid_input_tensor_references = [ipt.name for ipt in data["inputs"]]
        for out in data["outputs"]:
            if out.postprocessing is missing_:
                continue

            for postpr in out.postprocessing:
                if postpr.kwargs is missing_:
                    continue

                ref_tensor = postpr.kwargs.get("reference_tensor", missing_)
                if ref_tensor is not missing_ and ref_tensor not in valid_input_tensor_references:
                    raise ValidationError(f"{ref_tensor} not found in inputs")

    @validates_schema
    def weights_entries_match_weights_formats(self, data, **kwargs):
        weights: typing.Dict[str, _WeightsEntryBase] = data["weights"]
        for weights_format, weights_entry in weights.items():
            if weights_format in ["keras_hdf5", "tensorflow_js", "tensorflow_saved_model_bundle"]:
                assert isinstance(
                    weights_entry,
                    (
                        raw_nodes.KerasHdf5WeightsEntry,
                        raw_nodes.TensorflowJsWeightsEntry,
                        raw_nodes.TensorflowSavedModelBundleWeightsEntry,
                    ),
                )
                if weights_entry.tensorflow_version is missing_:
                    # todo: raise ValidationError (allow -> require)?
                    warnings.warn(f"missing 'tensorflow_version' entry for weights format {weights_format}")

            if weights_format == "onnx":
                assert isinstance(weights_entry, raw_nodes.OnnxWeightsEntry)
                if weights_entry.opset_version is missing_:
                    # todo: raise ValidationError?
                    warnings.warn(f"missing 'opset_version' entry for weights format {weights_format}")
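
The `add_weights_format_key_to_weights_entry_value` hook above copies each dictionary key into its value so that the Union of nested weight-entry schemas can select the matching one during loading. The same `@pre_load` pattern in plain marshmallow, with a hypothetical `kind` key and `EntrySchema` that are not the spec's own fields:

from copy import deepcopy

from marshmallow import Schema, fields, pre_load


class EntrySchema(Schema):
    kind = fields.String(required=True, load_only=True)
    value = fields.Integer(required=True)


class ContainerSchema(Schema):
    entries = fields.Dict(keys=fields.String(), values=fields.Nested(EntrySchema), required=True)

    @pre_load
    def add_kind_key(self, data, **kwargs):
        # copy first: Schema.validate() also runs pre_load hooks and must not mutate the caller's data
        data = deepcopy(data)
        for kind, entry in data.get("entries", {}).items():
            entry["kind"] = kind
        return data


print(ContainerSchema().load({"entries": {"a": {"value": 1}}}))
# -> {'entries': {'a': {'kind': 'a', 'value': 1}}}
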
Example #16
class TensorflowSavedModelBundleWeightsEntry(_WeightsEntryBase):
    bioimageio_description = "Tensorflow Saved Model Bundle weights format"
    weights_format = fields.String(
        validate=field_validators.Equal("tensorflow_saved_model_bundle"), required=True, load_only=True
    )
    tensorflow_version = fields.Version()
Example #17
class TensorflowJsWeightsEntry(_WeightsEntryBase):
    bioimageio_description = "Tensorflow Javascript weights format"
    weights_format = fields.String(validate=field_validators.Equal("tensorflow_js"), required=True, load_only=True)
    tensorflow_version = fields.Version()
Example #18
class PytorchScriptWeightsEntry(_WeightsEntryBase):
    bioimageio_description = "Torch Script weights format"
    weights_format = fields.String(validate=field_validators.Equal("pytorch_script"), required=True, load_only=True)
Example #19
class PytorchStateDictWeightsEntry(_WeightsEntryBase):
    bioimageio_description = "PyTorch state dictionary weights format"
    weights_format = fields.String(validate=field_validators.Equal("pytorch_state_dict"), required=True, load_only=True)
Example #20
class OnnxWeightsEntry(_WeightsEntryBase):
    bioimageio_description = "ONNX weights format"
    weights_format = fields.String(validate=field_validators.Equal("onnx"), required=True, load_only=True)
    opset_version = fields.Integer()
Example #21
class KerasHdf5WeightsEntry(_WeightsEntryBase):
    bioimageio_description = "Keras HDF5 weights format"
    weights_format = fields.String(validate=field_validators.Equal("keras_hdf5"), required=True, load_only=True)
    tensorflow_version = fields.Version()
Example #22
class LinkedDataset(_BioImageIOSchema):
    id = fields.String(bioimageio_description="dataset id")
Example #23
class RunMode(_BioImageIOSchema):
    name = fields.String(
        required=True, bioimageio_description="The name of the `run_mode`"
    )  # todo: limit valid run mode names
    kwargs = fields.Kwargs()
Example #24
class Model(rdf.schema.RDF):
    raw_nodes = raw_nodes

    class Meta:
        unknown = RAISE
        exclude = ("source",
                   )  # while RDF does have a source field, Model does not

    bioimageio_description = f"""# BioImage.IO Model Resource Description File Specification {get_args(raw_nodes.FormatVersion)[-1]}
This specification defines the fields used in a BioImage.IO-compliant resource description file (`RDF`) for describing AI models with pretrained weights.
These fields are typically stored in YAML files which we call Model Resource Description Files or `model RDF`.
The model RDFs can be downloaded or uploaded to the bioimage.io website, produced or consumed by BioImage.IO-compatible consumers (e.g. image analysis software or other websites).

The model RDF YAML file contains mandatory and optional fields. In the following description, optional fields are indicated by _optional_.
_optional*_ with an asterisk indicates the field is optional depending on the value in another field.
"""
    # todo: sync authors with RDF
    authors = fields.List(
        fields.Nested(rdf.schema.Author()),
        validate=field_validators.Length(min=1),
        required=True,
        bioimageio_description=rdf.schema.RDF.authors_bioimageio_description,
    )

    badges = missing  # todo: allow badges for Model (RDF has it)
    cite = fields.List(
        fields.Nested(rdf.schema.CiteEntry()),
        required=True,  # todo: unify authors with RDF (optional or required?)
        validate=field_validators.Length(min=1),
        bioimageio_description=rdf.schema.RDF.cite_bioimageio_description,
    )

    config = fields.YamlDict(
        bioimageio_description=rdf.schema.RDF.config_bioimageio_description +
        """
For example:
```yaml
config:
  # custom config for DeepImageJ, see https://github.com/bioimage-io/configuration/issues/23
  deepimagej:
    model_keys:
      # In principle the tag "SERVING" is used in almost every tf model
      model_tag: tf.saved_model.tag_constants.SERVING
      # Signature definition to call the model. Again "SERVING" is the most general
      signature_definition: tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
    test_information:
      input_size: [2048x2048] # Size of the input images
      output_size: [1264x1264] # Size of all the outputs
      device: cpu # Device used. In principle either cpu or GPU
      memory_peak: 257.7 Mb # Maximum memory consumed by the model in the device
      runtime: 78.8s # Time it took to run the model
      pixel_size: [9.658E-4µmx9.658E-4µm] # Size of the pixels of the input
```
""")

    documentation = fields.Union(
        [
            fields.URL(),
            fields.RelativeLocalPath(validate=field_validators.Attribute(
                "suffix",
                field_validators.Equal(
                    ".md",
                    error=
                    "{!r} is invalid; expected markdown file with '.md' extension."
                ),
            )),
        ],
        required=True,
        bioimageio_description=
        "Relative path or URL to file with additional documentation in markdown. "
        "The file must be in markdown format with `.md` file name extension. "
        "It is recommended to use `README.md` as the documentation name.",
    )

    format_version = fields.String(
        validate=field_validators.OneOf(get_args_flat(
            raw_nodes.FormatVersion)),
        required=True,
        bioimageio_description_order=0,
        bioimageio_description=
        f"""Version of the BioImage.IO Model Resource Description File Specification used.
This is mandatory, and important for the consumer software to verify before parsing the fields.
The recommended behavior for the implementation is to keep backward compatibility and throw an error if the model yaml
is in an unsupported format version. The current format version described here is
{get_args(raw_nodes.FormatVersion)[-1]}""",
    )

    git_repo = fields.URL(
        bioimageio_description=rdf.schema.RDF.git_repo_bioimageio_description +
        " If the model is contained in a subfolder of a git repository, then a url to the exact folder "
        + "(which contains the configuration yaml file) should be used.")

    inputs = fields.List(
        fields.Nested(InputTensor()),
        validate=field_validators.Length(min=1),
        required=True,
        bioimageio_description=
        "Describes the input tensors expected by this model.",
    )

    @validates("inputs")
    def no_duplicate_input_tensor_names(
            self, value: typing.List[raw_nodes.InputTensor]):
        if not isinstance(value, list) or not all(
                isinstance(v, raw_nodes.InputTensor) for v in value):
            raise ValidationError(
                "Could not check for duplicate input tensor names due to another validation error."
            )

        names = [t.name for t in value]
        if len(names) > len(set(names)):
            raise ValidationError(
                "Duplicate input tensor names are not allowed.")

    license = fields.String(
        validate=field_validators.OneOf(LICENSES),
        required=True,
        bioimageio_description=rdf.schema.RDF.license_bioimageio_description,
    )

    name = fields.String(
        required=True,
        bioimageio_description=
        "Name of this model. It should be human-readable and only contain letters, numbers, "
        "underscore '_', minus '-' or spaces and not be longer than 64 characters.",
    )

    outputs = fields.List(
        fields.Nested(OutputTensor()),
        validate=field_validators.Length(min=1),
        bioimageio_description="Describes the output tensors from this model.",
    )

    @validates("outputs")
    def no_duplicate_output_tensor_names(
            self, value: typing.List[raw_nodes.OutputTensor]):
        if not isinstance(value, list) or not all(
                isinstance(v, raw_nodes.OutputTensor) for v in value):
            raise ValidationError(
                "Could not check for duplicate output tensor names due to another validation error."
            )

        names = [t["name"] if isinstance(t, dict) else t.name for t in value]
        if len(names) > len(set(names)):
            raise ValidationError(
                "Duplicate output tensor names are not allowed.")

    @validates_schema
    def inputs_and_outputs(self, data, **kwargs):
        ipts: typing.List[raw_nodes.InputTensor] = data.get("inputs")
        outs: typing.List[raw_nodes.OutputTensor] = data.get("outputs")
        if any([
                not isinstance(ipts, list),
                not isinstance(outs, list),
                not all(isinstance(v, raw_nodes.InputTensor) for v in ipts),
                not all(isinstance(v, raw_nodes.OutputTensor) for v in outs),
        ]):
            raise ValidationError(
                "Could not check for duplicate tensor names due to another validation error."
            )

        # no duplicate tensor names
        names = [t.name for t in ipts + outs]  # type: ignore
        if len(names) > len(set(names)):
            raise ValidationError("Duplicate tensor names are not allowed.")

        tensors_by_name: typing.Dict[str,
                                     typing.Union[raw_nodes.InputTensor,
                                                  raw_nodes.OutputTensor]] = {
                                                      t.name: t
                                                      for t in ipts +
                                                      outs  # type: ignore
                                                  }

        # minimum shape leads to valid output:
        # output with subtracted halo has to result in meaningful output even for the minimal input
        # see https://github.com/bioimage-io/spec-bioimage-io/issues/392
        def get_min_shape(t) -> numpy.ndarray:
            if isinstance(t.shape, raw_nodes.ParametrizedInputShape):
                shape = numpy.array(t.shape.min)
            elif isinstance(t.shape, raw_nodes.ImplicitOutputShape):
                shape = get_min_shape(tensors_by_name[t.shape.reference_tensor]
                                      ) * t.shape.scale + 2 * numpy.array(
                                          t.shape.offset)
            else:
                shape = numpy.array(t.shape)

            return shape

        for out in outs:
            if isinstance(
                    out.shape,
                    raw_nodes.ImplicitOutputShape) and len(out.shape) != len(
                        tensors_by_name[out.shape.reference_tensor].shape):
                raise ValidationError(
                    f"Referenced tensor {out.shape.reference_tensor} "
                    f"with {len(tensors_by_name[out.shape.reference_tensor].shape)} dimensions does not match "
                    f"output tensor {out.name} with {len(out.shape)} dimensions."
                )

            min_out_shape = get_min_shape(out)
            if out.halo:
                halo = out.halo
                halo_msg = f" for halo {out.halo}"
            else:
                halo = [0] * len(min_out_shape)
                halo_msg = ""

            if any([s - 2 * h < 1 for s, h in zip(min_out_shape, halo)]):
                raise ValidationError(
                    f"Minimal shape {min_out_shape} of output {out.name} is too small{halo_msg}."
                )

    packaged_by = fields.List(
        fields.Nested(rdf.schema.Author()),
        bioimageio_description=
        f"The persons that have packaged and uploaded this model. Only needs to be specified if "
        f"different from `authors` in root or any entry in `weights`.",
    )

    parent = fields.Nested(
        ModelParent(),
        bioimageio_description=
        "The model from which this model is derived, e.g. by fine-tuning the weights.",
    )

    run_mode = fields.Nested(
        RunMode(),
        bioimageio_description=
        "Custom run mode for this model: for more complex prediction procedures like test time "
        "data augmentation that currently cannot be expressed in the specification. "
        "No standard run modes are defined yet.",
    )

    sample_inputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        validate=field_validators.Length(min=1),
        bioimageio_description=
        "List of URIs/local relative paths to sample inputs to illustrate possible inputs for "
        "the model, for example stored as png or tif images. "
        "These sample files are not used to test the model; they only illustrate an example use case for a human user.",
    )
    sample_outputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        validate=field_validators.Length(min=1),
        bioimageio_description=
        "List of URIs/local relative paths to sample outputs corresponding to the "
        "`sample_inputs`.",
    )

    test_inputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        validate=field_validators.Length(min=1),
        required=True,
        bioimageio_description=
        "List of URIs or local relative paths to test inputs as described in inputs for "
        "**a single test case**. "
        "This means if your model has more than one input, you should provide one URI for each input. "
        "Each test input should be a file with an ndarray in "
        "[numpy.lib file format](https://numpy.org/doc/stable/reference/generated/numpy.lib.format.html#module-numpy.lib.format). "
        "The extension must be '.npy'.",
    )
    test_outputs = fields.List(
        fields.Union([fields.URI(), fields.RelativeLocalPath()]),
        validate=field_validators.Length(min=1),
        required=True,
        bioimageio_description="Analogous to test_inputs.",
    )

    timestamp = fields.DateTime(
        required=True,
        bioimageio_description=
        "Timestamp of the initial creation of this model in [ISO 8601]"
        "(https://en.wikipedia.org/wiki/ISO_8601) format.",
    )

    training_data = fields.Union(
        [fields.Nested(Dataset()),
         fields.Nested(LinkedDataset())])

    weights = fields.Dict(
        fields.String(
            validate=field_validators.OneOf(get_args(raw_nodes.WeightsFormat)),
            required=True,
            bioimageio_description="Format of this set of weights. "
            f"One of: {', '.join(get_args(raw_nodes.WeightsFormat))}",
        ),
        fields.Union(
            [fields.Nested(we()) for we in get_args(WeightsEntry)],
            short_bioimageio_description=
            ("The weights for this model. Weights can be given for different formats, but should "
             "otherwise be equivalent. "
             "See [weight_formats_spec_0_4.md]"
             "(https://github.com/bioimage-io/spec-bioimage-io/blob/gh-pages/weight_formats_spec_0_4.md) "
             "for the required and optional fields per weight format. "
             "The available weight formats determine which consumers can use this model."
             ),
        ),
        required=True,
    )

    @pre_load
    def add_weights_format_key_to_weights_entry_value(self,
                                                      data: dict,
                                                      many=False,
                                                      partial=False,
                                                      **kwargs):
        data = deepcopy(
            data
        )  # Schema.validate() calls pre_load methods, thus we should not modify the input data
        if many or partial:
            raise NotImplementedError

        for weights_format, weights_entry in data.get("weights", {}).items():
            if "weights_format" in weights_entry:
                raise ValidationError(
                    f"Got unexpected key 'weights_format' in weights entry {weights_format}"
                )

            weights_entry["weights_format"] = weights_format

        return data

    @validates_schema
    def validate_reference_tensor_names(self, data, **kwargs):
        def get_tnames(tname: str):
            return [
                t.get("name") if isinstance(t, dict) else t.name
                for t in data.get(tname, [])
            ]

        valid_input_tensor_references = get_tnames("inputs")
        ins = data.get("inputs", [])
        outs = data.get("outputs", [])
        if not isinstance(ins, list) or not isinstance(outs, list):
            raise ValidationError(
                f"Failed to validate reference tensor names due to other validation errors in inputs/outputs."
            )

        for t in outs:
            if not isinstance(t, raw_nodes.OutputTensor):
                raise ValidationError(
                    "Failed to validate reference tensor names due to validation errors in outputs"
                )

            if t.postprocessing is missing:
                continue

            for postpr in t.postprocessing:
                if postpr.kwargs is missing:
                    continue

                ref_tensor = postpr.kwargs.get("reference_tensor", missing)
                if ref_tensor is not missing and ref_tensor not in valid_input_tensor_references:
                    raise ValidationError(f"{ref_tensor} not found in inputs")

        for t in ins:
            if not isinstance(t, raw_nodes.InputTensor):
                raise ValidationError(
                    "Failed to validate reference tensor names due to validation errors in inputs"
                )

            if t.preprocessing is missing:
                continue

            for prep in t.preprocessing:
                if prep.kwargs is missing:
                    continue

                ref_tensor = prep.kwargs.get("reference_tensor", missing)
                if ref_tensor is not missing and ref_tensor not in valid_input_tensor_references:
                    raise ValidationError(f"{ref_tensor} not found in inputs")

                if ref_tensor == t.name:
                    raise ValidationError(
                        f"invalid self reference for preprocessing of tensor {t.name}"
                    )

    @validates_schema
    def weights_entries_match_weights_formats(self, data, **kwargs):
        weights: typing.Dict[str, WeightsEntry] = data.get("weights", {})
        for weights_format, weights_entry in weights.items():
            if not isinstance(weights_entry, get_args(raw_nodes.WeightsEntry)):
                raise ValidationError(
                    "Cannot validate keys in weights field due to other validation errors."
                )

            if weights_format in ["pytorch_state_dict", "torchscript"]:
                if weights_format == "pytorch_state_dict":
                    assert isinstance(weights_entry,
                                      raw_nodes.PytorchStateDictWeightsEntry)
                elif weights_format == "torchscript":
                    assert isinstance(weights_entry,
                                      raw_nodes.TorchscriptWeightsEntry)
                else:
                    raise NotImplementedError

                if weights_entry.dependencies is missing and weights_entry.pytorch_version is missing:
                    self.warn(f"weights:{weights_format}",
                              "missing 'pytorch_version'")

            if weights_format in [
                    "keras_hdf5", "tensorflow_js",
                    "tensorflow_saved_model_bundle"
            ]:
                if weights_format == "keras_hdf5":
                    assert isinstance(weights_entry,
                                      raw_nodes.KerasHdf5WeightsEntry)
                elif weights_format == "tensorflow_js":
                    assert isinstance(weights_entry,
                                      raw_nodes.TensorflowJsWeightsEntry)
                elif weights_format == "tensorflow_saved_model_bundle":
                    assert isinstance(
                        weights_entry,
                        raw_nodes.TensorflowSavedModelBundleWeightsEntry)
                else:
                    raise NotImplementedError

                if weights_entry.dependencies is missing and weights_entry.tensorflow_version is missing:
                    self.warn(f"weights:{weights_format}",
                              "missing 'tensorflow_version'")

            if weights_format == "onnx":
                assert isinstance(weights_entry, raw_nodes.OnnxWeightsEntry)
                if weights_entry.dependencies is missing and weights_entry.opset_version is missing:
                    self.warn(f"weights:{weights_format}",
                              "missing 'opset_version'")
Example #25
class Dependencies(SharedBioImageIOSchema):
    manager = fields.String(bioimageio_description="Dependency manager. For example: 'conda', 'maven', or 'pip'")
    file = fields.Union(
        [fields.URI(), fields.RelativeLocalPath()],
        bioimageio_description="Dependency file. For example: 'environment.yaml', 'pom.xml', or 'requirements.txt'",
    )
Example #26
class Author(_Person):
    name = fields.String(required=True, bioimageio_description="Full name.")
Example #27
class RDF(_BioImageIOSchema):
    class Meta:
        unknown = EXCLUDE

    bioimageio_description = f"""# BioImage.IO Resource Description File Specification {get_args(FormatVersion)[-1]}
This specification defines the fields used in a general BioImage.IO-compliant resource description file (`RDF`).
An RDF is stored as a YAML file and describes resources such as models, datasets, applications and notebooks. 
Note that models are described with an extended Model RDF specification.

The RDF contains mandatory and optional fields. In the following description, optional fields are indicated by 
_optional_. _optional*_ with an asterisk indicates the field is optional depending on the value in another field.
If no specialized RDF exists for the specified type (like model RDF for type='model') additional fields may be 
specified.
"""

    attachments = fields.Nested(
        Attachments(),
        bioimageio_description="Additional unknown keys are allowed.")

    authors_bioimageio_description = (
        "A list of authors. The authors are the creators of the specifications and the primary points of contact."
    )
    authors = fields.List(
        fields.Nested(Author()),
        bioimageio_description=authors_bioimageio_description)

    badges = fields.List(fields.Nested(Badge()),
                         bioimageio_description="a list of badges")

    cite_bioimageio_description = """A list of citation entries.
Each entry contains a mandatory `text` field and either one or both of `doi` and `url`.
E.g. the citation for the model architecture and/or the training data used."""
    cite = fields.List(fields.Nested(CiteEntry()),
                       bioimageio_description=cite_bioimageio_description)

    config_bioimageio_description = (
        "A custom configuration field that can contain any keys not present in the RDF spec. "
        "This means you should not store, for example, github repo URL in `config` since we already have the "
        "`git_repo` key defined in the spec.\n"
        "Keys in `config` may be very specific to a tool or consumer software. To avoid conflicted definitions, "
        "it is recommended to wrap configuration into a sub-field named with the specific domain or tool name, "
        """for example:

```yaml
   config:
      bioimage_io:  # here is the domain name
        my_custom_key: 3837283
        another_key:
           nested: value
      imagej:
        macro_dir: /path/to/macro/file
```
"""
        "If possible, please use [`snake_case`](https://en.wikipedia.org/wiki/Snake_case) for keys in `config`."
    )
    config = fields.YamlDict(
        bioimageio_description=config_bioimageio_description)

    covers = fields.List(
        fields.Union([fields.URL(), fields.RelativeLocalPath()]),
        bioimageio_description=
        "A list of cover images provided by either a relative path to the model folder, or a "
        "hyperlink starting with 'http[s]'. Please use an image smaller than 500KB and an aspect ratio width to height "
        "of 2:1. The supported image formats are: 'jpg', 'png', 'gif'.",  # todo: field_validators image format
    )

    description = fields.String(
        required=True,
        bioimageio_description="A string containing a brief description.")

    documentation = fields.Union(
        [
            fields.URL(),
            fields.RelativeLocalPath(validate=field_validators.Attribute(
                "suffix",
                field_validators.Equal(
                    ".md",
                    error=
                    "{!r} is invalid; expected markdown file with '.md' extension."
                ),
            )),
        ],
        bioimageio_description=
        "URL or relative path to markdown file with additional documentation. "
        "For markdown files the recommended documentation file name is `README.md`.",
    )

    download_url = fields.URL(
        bioimageio_description="optional url to download the resource from")

    format_version = fields.String(
        required=True,
        bioimageio_description_order=0,
        bioimageio_description=
        ("Version of the BioImage.IO Resource Description File Specification used. "
         f"The current general format version described here is {get_args(FormatVersion)[-1]}. "
         "Note: The general RDF format is not to be confused with specialized RDF format like the Model RDF format."
         ),
    )

    @validates_schema
    def format_version_matches_type(self, data, **kwargs):
        format_version = data.get("format_version")
        type_ = data.get("type")
        try:
            patched_format_version = get_patched_format_version(
                type_, format_version)
            if tuple(map(int, format_version.split("."))) > tuple(map(int, patched_format_version.split("."))):
                raise ValueError(
                    f"Unknown format_version {format_version} (latest patch: {patched_format_version}; latest format version: )"
                )
        except Exception as e:
            raise ValidationError(
                f"Invalid format_version {format_version} for RDF type {type_}. (error: {e})"
            )

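    # Note: the comparison above uses integer tuples rather than the raw split(".") string
    # lists because string comparison misorders multi-digit components, e.g.:
    #   "0.4.10".split(".") > "0.4.9".split(".")                                    -> False ('10' < '9' as strings)
    #   tuple(map(int, "0.4.10".split("."))) > tuple(map(int, "0.4.9".split(".")))  -> True
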
    git_repo_bioimageio_description = "A url to the git repository, e.g. to GitHub or GitLab."
    git_repo = fields.URL(
        bioimageio_description=git_repo_bioimageio_description)

    icon = fields.String(
        bioimageio_description="an icon for the resource"
    )  # todo: limit length? validate=field_validators.Length(max=1)

    id = fields.String(
        bioimageio_description="Unique id within a collection of resources.")
    license_bioimageio_description = (
        "A [SPDX license identifier](https://spdx.org/licenses/) (e.g. `CC-BY-4.0`, `MIT`, "
        "`BSD-2-Clause`). We do not support custom licenses beyond the SPDX license list; if you need that, please "
        "open a GitHub issue to discuss your intentions with the community.")
    license = fields.String(  # todo: make mandatory?
        # validate=field_validators.OneOf(LICENSES),  # enforce license id
        bioimageio_description=license_bioimageio_description)

    @validates("license")
    def warn_about_deprecated_spdx_license(self, value: str):
        license_info = LICENSES.get(value)
        if license_info is None:
            self.warn(
                "license",
                f"{value} is not a recognized SPDX license identifier. See https://spdx.org/licenses/"
            )
        else:
            if license_info.get("isDeprecatedLicenseId", False):
                self.warn("license",
                          f"{value} ({license_info['name']}) is deprecated.")

            if not license_info.get("isFsfLibre", False):
                self.warn(
                    "license",
                    f"{value} ({license_info['name']}) is not FSF Free/libre.")

    links = fields.List(
        fields.String(),
        bioimageio_description="links to other bioimage.io resources")

    maintainers = fields.List(
        fields.Nested(Maintainer()),
        bioimageio_description="Maintainers of this resource.")

    name = fields.String(
        required=True,
        bioimageio_description="name of the resource, a human-friendly name")

    @validates("name")
    def warn_about_long_name(self, value: str):
        if isinstance(value, str):
            if len(value) > 64:
                self.warn(
                    "name",
                    f"Length of name ({len(value)}) exceeds the recommended maximum length of 64 characters."
                )
        else:
            self.warn("name", f"Could not check length of name {value}.")

    rdf_source = fields.Union(
        [fields.URL(), fields.DOI()],
        bioimageio_description=
        "url or doi to the source of the resource definition")
    source = fields.Union(
        [fields.URI(), fields.RelativeLocalPath()],
        bioimageio_description=
        "url or local relative path to the source of the resource",
    )

    tags = fields.List(fields.String(),
                       bioimageio_description="A list of tags.")

    @validates("tags")
    def warn_about_tag_categories(self, value):
        if BIOIMAGEIO_SITE_CONFIG is None:
            error = BIOIMAGEIO_SITE_CONFIG_ERROR
        else:
            missing_categories = []
            try:
                categories = {
                    c["type"]: c.get("tag_categories", {})
                    for c in BIOIMAGEIO_SITE_CONFIG["resource_categories"]
                }.get(self.__class__.__name__.lower(), {})
                for cat, entries in categories.items():
                    if not any(e in value for e in entries):
                        missing_categories.append({cat: entries})
            except Exception as e:
                error = str(e)
            else:
                error = None
                if missing_categories:
                    self.warn(
                        "tags",
                        f"Missing tags for categories: {missing_categories}")

        if error is not None:
            self.warn("tags", f"could not check tag categories ({error})")

    type = fields.String(required=True)

    # todo: restrict valid RDF types?
    @validates("type")
    def validate_type(self, value):
        schema_type = self.__class__.__name__.lower()
        if value != schema_type:
            self.warn(
                "type",
                f"Unrecognized type '{value}'. Validating as {schema_type}.")

    version = fields.Version(
        bioimageio_description=
        "The version number of the model. The version number format must be a string in "
        "`MAJOR.MINOR.PATCH` format following the guidelines in Semantic Versioning 2.0.0 (see https://semver.org/), "
        "e.g. the initial version number should be `0.1.0`.")
Example #28
class Maintainer(_Person):
    github_user = fields.String(required=True,
                                bioimageio_description="GitHub user name.")