Example #1
    ("wide_resnet101_2", "WideResNet101_2", "fully_connected"),
    ("wide_resnet50_2", "WideResNet50_2", "fully_connected"),
]:
    # Dynamically create the config and context classes for this model
    cls_config = type(
        name + "ModelConfig",
        (PyTorchPreTrainedModelConfig, ),
        {},
    )
    cls_context = type(
        name + "ModelContext",
        (PyTorchPretrainedContext, ),
        {},
    )

    # Build the model class itself, wiring in the generated config and context
    # classes and the torchvision model to load
    dffml_cls = type(
        name + "Model",
        (PyTorchPreTrainedModel, ),
        {
            "CONFIG": cls_config,
            "CONTEXT": cls_context,
            "PYTORCH_MODEL": model_name,
            "LAST_LAYER_TYPE": last_layer_type,
        },
    )

    # Register the generated class under its entrypoint name
    dffml_cls = entrypoint(model_name)(dffml_cls)

    # Expose the generated classes as module attributes so they can be imported
    setattr(sys.modules[__name__], cls_config.__qualname__, cls_config)
    setattr(sys.modules[__name__], cls_context.__qualname__, cls_context)
    setattr(sys.modules[__name__], dffml_cls.__qualname__, dffml_cls)
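The core of this example is Python's three-argument type() call combined with
setattr() on the current module, which makes each generated class importable by
name. A minimal, self-contained sketch of that pattern (the Base class and the
names below are invented for illustration and are not part of DFFML):

import sys


class Base:
    """Shared behaviour for every generated class."""

    GREETING = "hello"

    def greet(self):
        return f"{self.GREETING} from {type(self).__name__}"


for prefix, greeting in [("English", "hello"), ("Spanish", "hola")]:
    # type(name, bases, namespace) builds a new class at runtime
    generated = type(prefix + "Greeter", (Base, ), {"GREETING": greeting})
    # Publishing it on this module makes "from <module> import SpanishGreeter" work
    setattr(sys.modules[__name__], generated.__qualname__, generated)

print(SpanishGreeter().greet())  # prints "hola from SpanishGreeter"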
Example #2
                        f"scikit-{entry_point_name}",
                    ),
                ),
            ),
            "predict": (str, field("Label or the value to be predicted")),
            "features": (Features, field("Features to train on")),
        },
    )

    # Dynamically create the model context class with its applicable_features helper
    dffml_cls_ctx = type(
        name + "ModelContext",
        (ScikitContext, ),
        {"applicable_features": applicable_features_function},
    )

    # Build the model class itself, wiring in the config, context, and the
    # underlying scikit-learn class
    dffml_cls = type(
        name + "Model",
        (Scikit, ),
        {
            "CONFIG": dffml_config,
            "CONTEXT": dffml_cls_ctx,
            "SCIKIT_MODEL": cls,
        },
    )
    # Register the class under its entrypoint name; the decorator also records
    # the ENTRY_POINT_ORIG_LABEL
    dffml_cls = entrypoint(entry_point_name)(dffml_cls)

    setattr(sys.modules[__name__], dffml_config.__qualname__, dffml_config)
    setattr(sys.modules[__name__], dffml_cls_ctx.__qualname__, dffml_cls_ctx)
    setattr(sys.modules[__name__], dffml_cls.__qualname__, dffml_cls)
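The truncated top of this example is building dffml_config from a mapping of
field names to (type, field(...)) pairs. As an illustration only (DFFML uses its
own field and config helpers, not the standard library, and the names below are
made up), the same idea can be sketched with dataclasses.make_dataclass:

import dataclasses

# Hypothetical field specs in the same name -> (type, field) shape as above
field_specs = {
    "predict": (str, dataclasses.field(default="label")),
    "directory": (str, dataclasses.field(default="scikit-example")),
}

ExampleModelConfig = dataclasses.make_dataclass(
    "ExampleModelConfig",
    [(name, typ, spec) for name, (typ, spec) in field_specs.items()],
)

print(ExampleModelConfig(predict="price", directory="/tmp/scikit-lr"))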
Example #3
        # The snippet begins inside the loss base class (presumably PyTorchLoss),
        # where the wrapped torch.nn loss is instantiated from the config
        self.function = self.LOSS(**self.config._asdict())

    @classmethod
    def load(cls, class_name: str = None):
        # Resolve a lowercase loss name (e.g. "crossentropyloss") to the
        # generated wrapper class registered on this module below
        for name, loss_class in inspect.getmembers(nn, inspect.isclass):
            if name.endswith("Loss") and name.lower() == class_name:
                return getattr(sys.modules[__name__], name + "Function")


# Generate and register a wrapper class for every loss class in torch.nn
for name, loss_class in inspect.getmembers(nn, inspect.isclass):
    if name.endswith("Loss"):

        cls_config = make_pytorch_config(name + "Config", loss_class)

        cls = entrypoint(name.lower())(type(
            name + "Function",
            (PyTorchLoss, ),
            {
                "CONFIG": cls_config,
                "CONTEXT": {},
                "LOSS": loss_class,
            },
        ))

        setattr(sys.modules[__name__], cls.__qualname__, cls)

# TODO Currently only the torch.nn module has type annotations.
# Add PyTorchOptimizer and PyTorchScheduler after the next torch release,
# once type annotations have been added for everything.
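Given the load() classmethod above, a lowercase loss name resolves to the
generated wrapper class. A short usage sketch (the call below is an assumption
about how the module is used, not taken from the example itself):

# Hypothetical usage: only the lookup behaviour follows from load() above.
loss_cls = PyTorchLoss.load("crossentropyloss")
# -> the generated CrossEntropyLossFunction class, whose instances wrap
#    torch.nn.CrossEntropyLoss via self.function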