Example #1
0
    def __init__(
        self, shape=None, dtype=None, startpoint=True, endpoint=False, label=False
    ):
        """Configure a single data feature (shape, dtype, and graph role).

        Args:
            shape: list of ints describing the feature shape, or the string
                "unknown" when the shape is not yet determinable.
            dtype: string name of the feature dtype; must be one of the
                values returned by return_available_dtypes().
            startpoint: whether this feature acts as a starting point.
            endpoint: whether this feature acts as an end point.
            label: whether this feature is a label.
        """
        # TODO: this is a bandaid fix and will need to be addressed in a future.
        # I am not currently sure how best to handle this case.
        if shape == "unknown":
            self.shape = (None,)
        else:
            self.shape = list_of_numeric(
                default_value=None,
                is_type=int,
                required=True,
                description=(
                    "shape of the data feature\n"
                    " > e.g. data:datasets:'mnist':in:image_in:shape: [28,28,1]"
                ),
            )(shape)
            # TODO: include name?

        self.dtype = categorical(
            default_value=None,
            required=True,
            is_type=str,
            is_in_list=return_available_dtypes(),
            description=(
                "dtype of the feature\n"
                # BUG FIX: the example previously showed an unterminated
                # quote ('float32) which is misleading as a usage example
                " > e.g. data:datasets:'mnist':in:image_in:dtype: 'float32'"
            ),
        )(dtype)

        self.startpoint = startpoint
        self.endpoint = endpoint
        self.label = label
Example #2
0
    def __init__(self, cur_type=None, cur_options=None, cur_dataset=None):
        """Validate and store a performance (metric/loss) specification.

        Ensures `cur_type` names a known performance option and, when
        `cur_options` is given, that it is a dict containing every option
        required by that type.
        """
        if not cur_type:
            raise ValueError("no type is specified")

        # names of the supported performance types
        valid_names = [entry[0] for entry in PERFORMANCE_OPTIONS]

        self.type = categorical(
            default_value=None,
            required=True,
            is_type=str,
            to_lower=True,
            is_in_list=valid_names,
        )(cur_type)

        # look up which options this performance type requires
        required_opts = PERFORMANCE_OPTIONS[valid_names.index(self.type)][1]

        if cur_options:
            # TODO: this will need to be improved at some point - error checking
            if not isinstance(cur_options, dict):
                raise TypeError(
                    f"cur options are not a dict: {cur_options}, type:{type(cur_options)}"
                )
            cur_opt_keys = cur_options.keys()
            # find the first required option that was not supplied, if any
            missing = next(
                (opt for opt in required_opts if opt not in cur_opt_keys), None
            )
            if missing is not None:
                raise ValueError(
                    f"required performance options for {cur_type} are {required_opts} but only {cur_opt_keys} were specified. at least {missing} is missing"
                )
        self.options = cur_options

        self.dataset = cur_dataset
Example #3
0
File: layer.py Project: kwierman/yeahml
    def __init__(self, layer_type=None, layer_source=None):
        """Resolve a layer definition to its implementing function and defaults.

        Args:
            layer_type: name of the layer (e.g. 'conv2d'), or the name of a
                function defined in `layer_source` when that is given.
            layer_source: optional path of a module containing custom layer
                functions; '/'-separated paths are converted to dotted module
                paths and a trailing '.py' is dropped before import.

        Raises:
            ValueError: if `layer_type` is not specified.
            AttributeError: if `layer_type` is not found in `layer_source`.
        """
        if layer_type is None:
            raise ValueError("layer_type is not defined")

        # get function type and function information:
        # this outer if/else determines whether to obtain the layer
        # information from a source file, or from the keras api -- there are
        # no checks here
        if layer_source:
            layer_source = layer_source.replace("/", ".")
            if layer_source.endswith(".py"):
                # drop the extension to obtain an importable module path
                import_source = layer_source[: -len(".py")]
            else:
                # BUG FIX: previously `import_source` was left unbound
                # (NameError) when the source did not end in '.py'
                import_source = layer_source
            custom_mod = importlib.import_module(import_source)

            try:
                custom_func = custom_mod.__dict__[layer_type]
            except KeyError as err:
                # BUG FIX: a dict lookup raises KeyError, not AttributeError;
                # the original handler never fired and a raw KeyError escaped
                names = [n for n in dir(custom_mod) if not n.startswith("_")]
                raise AttributeError(
                    f"custom layer named {layer_type} not found in {import_source} -- found: {names}. All names found {dir(custom_mod)}"
                ) from err
            self.str = f"{layer_type}"
            fn_dict = return_layer_defaults(custom_func)
        else:
            self.str = categorical(
                required=True,
                is_type=str,
                is_in_list=return_available_layers().keys(),
                description=(
                    "string defining the layer type of the indicated layer\n"
                    " > e.g. model:layers:conv_1:type: 'conv2d'"
                ),
            )(layer_type)

            fn_dict = return_layer_defaults(self.str)
        # fn_dict:
        # {
        #     "func": func,
        #     "func_args": func_args,
        #     "func_defaults": func_defaults,
        # }
        self.func = fn_dict["func"]
        self.func_args = fn_dict["func_args"]
        self.func_defaults = fn_dict["func_defaults"]
Example #4
0
    def __init__(
        self, opt_type=None, opt_options=None, opt_name=None, opt_objectives=None
    ):
        """Validate and store the configuration for a single optimizer."""
        # TODO: there are consistency issues here with the names of classes
        # and where the types are being created/checked

        # which optimizer implementation to use (e.g. 'adam')
        self.type = categorical(
            default_value=None,
            required=True,
            is_in_list=return_available_optimizers(),
            to_lower=True,
            description=(
                "The type of optimizer being used\n"
                " > e.g. optimize:optimizers:'name':type: 'adam'"
            ),
        )(opt_type)

        # optimizer hyperparameters; only the learning rate is predeclared
        self.options = parameter_config(
            known_dict={
                "learning_rate": numeric(
                    default_value=None,
                    required=True,
                    is_type=float,
                    description=(
                        "The learning rate for a specified optimizer\n"
                        " > e.g. optimize:optimizers:'name':options:learning_rate 0.001"
                    ),
                )
            }
        )(opt_options)

        # TODO: in a secondary check, we need to ensure the losses specified
        # are valid+included
        self.objectives = list_of_categorical(
            default_value=None,
            required=True,
            to_lower=True,
            description=(
                "The objective for a specified optimizer to optimize\n"
                " > e.g. optimize:optimizers:'name':objectives: ['mnist_objective']"
            ),
        )(opt_objectives)
Example #5
0
    def __call__(self, optimizers_spec_dict):
        """Build a {name: optimizer_config} dict from the raw optimizer spec.

        Args:
            optimizers_spec_dict: mapping of optimizer name to a dict that
                may contain 'type', 'options', and 'objectives' keys.

        Returns:
            dict mapping each name to the built optimizer_config result.

        Raises:
            ValueError: if `optimizers_spec_dict` is not a dict.
        """
        # TODO: this should be moved to the __init__
        if not isinstance(optimizers_spec_dict, dict):
            raise ValueError(
                f"optimizers_spec_dict ({optimizers_spec_dict}) is type {type(optimizers_spec_dict)} not type {type({})}"
            )

        temp_dict = {}
        for k, d in optimizers_spec_dict.items():

            # validate the optimizer name (categorical raises on invalid input)
            optimizer_name = categorical(
                default_value=None,
                required=True,
                is_type=str,
                to_lower=False,
                description=("The name of the optimizer \n"
                             " > e.g. optimize:optimizers: 'mnist_opt'"),
            )(k)

            # BUG FIX: the bare `except:` clauses below previously swallowed
            # everything, including SystemExit/KeyboardInterrupt; only a
            # missing key (KeyError) or a non-dict entry (TypeError) should
            # fall back to None
            try:
                opt_type = d["type"]
            except (KeyError, TypeError):
                opt_type = None

            try:
                opt_options = d["options"]
            except (KeyError, TypeError):
                opt_options = None

            try:
                opt_objectives = d["objectives"]
            except (KeyError, TypeError):
                opt_objectives = None

            conf = optimizer_config(
                opt_type=opt_type,
                opt_options=opt_options,
                opt_objectives=opt_objectives,
            )()
            temp_dict[k] = conf

        return temp_dict
Example #6
0
    def __init__(self, loss_type=None, loss_options=None, loss_track=None):
        """Validate and store the configuration of a single loss."""
        # which loss function to use; must be one of the available losses
        self.type = categorical(
            default_value=None,
            required=True,
            is_in_list=return_available_losses().keys(),
            is_type=str,
            to_lower=True,
        )(loss_type)

        # TODO: error check that options are valid
        # optional per-loss options (a list of dicts)
        options_validator = list_of_dict(
            default_value=None, is_type=list, required=False
        )
        self.options = options_validator(loss_options)

        # optional tracker(s) to apply to this loss
        track_validator = list_of_categorical(
            default_value=None,
            is_type=str,
            required=False,
            is_in_list=AVAILABLE_TRACKERS,
            to_lower=True,
        )
        self.track = track_validator(loss_track)
Example #7
0
from yeahml.config.default.types.param_types import optional_config

# TODO: check for extra keys in the configs that are not specified here
# meta
# TODO: set accepted options for `trace_level`
# TODO: ensure `default_load_params_path` is a path.. also, does this belong in
# meta?
# TODO: numbers could probably be converted to string (for experiment_name?)
meta = {
    "meta": {
        # directory
        "yeahml_dir": categorical(
            default_value="yeahml",
            required=False,
            is_type=str,
            to_lower=False,
            description=(
                "Root directory to store information\n"
                " > e.g. meta:yeahml_dir: 'yeahml'"
            ),
        ),  # could add a check that the location exists
        "data_name": categorical(
            default_value=None,
            required=True,
            is_type=str,
            to_lower=False,
            description=(
                "Description of the data used \n"
                " > e.g. meta:data_name: 'mnist', or  meta:data_name: 'V00'\n"
                "this logic will likely change in the future"
            ),
        ),
Example #8
0
    def __call__(self, performance_spec_dict):
        """Build a {name: performance_config} dict from the raw spec.

        Args:
            performance_spec_dict: mapping of performance-entry name to a
                dict that may contain 'loss' and 'metric' keys and must
                contain an 'in_config' key.

        Returns:
            dict mapping each validated name to the built performance_config.

        Raises:
            ValueError: if `performance_spec_dict` is not a dict.
            TypeError: if an individual entry is not a dict.
            KeyError: if an entry has no 'in_config'.
        """
        if not isinstance(performance_spec_dict, dict):
            raise ValueError(
                f"{performance_spec_dict} is type {type(performance_spec_dict)} not type {type({})}"
            )

        temp_dict = {}
        for k, d in performance_spec_dict.items():

            # validate the entry name (categorical raises on invalid input)
            performance_name = categorical(default_value=None,
                                           required=True,
                                           is_type=str,
                                           to_lower=False)(k)

            # BUG FIX: check the entry type before indexing into it;
            # previously a non-dict entry raised a raw TypeError from
            # d["loss"] and this friendly message was unreachable
            if not isinstance(d, dict):
                raise TypeError(
                    f"creating performance config, the performance of {k} does not have a valid dict - {d} is type {type(d)}"
                )

            loss_dict = d.get("loss")
            metric_dict = d.get("metric")
            try:
                in_dict = d["in_config"]
            except KeyError:
                raise KeyError(
                    f"no :in_config dict was specified for the performance of {k}: {d}"
                )

            # BUG FIX: default every loss field first; previously a missing
            # 'loss' entry left these names unbound, causing an
            # UnboundLocalError at the performance_config(...) call below
            loss_type = loss_options = loss_track = None
            if loss_dict:
                loss_type = loss_dict.get("type")
                loss_options = loss_dict.get("options")
                loss_track = loss_dict.get("track")

            # a loss must be specified, but no metrics are ok
            metric_type = metric_options = None
            if metric_dict:
                metric_type = metric_dict.get("type")
                metric_options = metric_dict.get("options")

            val = performance_config(
                loss_type=loss_type,
                loss_options=loss_options,
                loss_track=loss_track,
                metric_type=metric_type,
                metric_options=metric_options,
                in_dict=in_dict,
            )()
            temp_dict[performance_name] = val

        return temp_dict