Code example #1
def load_config(config: dict) -> None:
    """Load in place the different Python objects serialized as "str" in the configuration. The ``"load_data", "augment", "create_search_space", "preprocessing", "objective"`` keys of ``config`` are handled.

    Args:
        config (dict): The JSON encoded configuration generated by DeepHyper.
    """
    # load the data-loading function
    config["load_data"]["func"] = load_attr(config["load_data"]["func"])

    # load augmentation strategy
    if not config.get("augment") is None:
        config["augment"]["func"] = load_attr(config["augment"]["func"])

    # load the function creating the search space
    config["search_space"]["class"] = load_attr(
        config["search_space"]["class"])

    if not config.get("preprocessing") is None:
        config["preprocessing"]["func"] = load_attr(
            config["preprocessing"]["func"])
    else:
        config["preprocessing"] = None

    if type(config["objective"]) is str and "." in config["objective"]:
        config["objective"] = load_attr(config["objective"])
Code example #2
File: _hps.py Project: felker/deephyper
def add_subparser(parsers):
    """
    :meta private:
    """
    parser_name = "hps"

    parser = parsers.add_parser(
        parser_name, help="Command line to run hyperparameter search.")

    subparsers = parser.add_subparsers()

    for name, module_attr in HPS_SEARCHES.items():
        search_cls = load_attr(module_attr)

        search_parser = build_parser_from(search_cls)

        subparser = subparsers.add_parser(name=name,
                                          parents=[search_parser],
                                          conflict_handler="resolve")

        subparser.set_defaults(func=main)
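
A hedged usage sketch (the wiring below is assumed for illustration, not taken from the deephyper sources): the `hps` command registers itself against a root parser's subparsers object, and each `hps <search>` subcommand dispatches to `main`.

import argparse

# Hypothetical wiring of the "hps" command into a root CLI parser.
root_parser = argparse.ArgumentParser(prog="deephyper")
add_subparser(root_parser.add_subparsers())

# argparse exposes the selected subcommand's default function as "func";
# pop it and call it with the remaining parsed options.
args = root_parser.parse_args()
kwargs = vars(args)
kwargs.pop("func")(**kwargs)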
Code example #3
def selectMetric(name: str):
    """Return the metric defined by name.

    Args:
        name (str): a string referenced in DeepHyper, one referenced in Keras, or an attribute name to import.

    Returns:
        str or callable: a string, assuming it is referenced in the Keras framework, or a callable taking ``(y_true, y_pred)`` as inputs and returning a tensor.
    """
    if callable(name):
        return name
    if metrics_func.get(name) is None and metrics_obj.get(name) is None:
        try:
            return load_attr(name)
        except Exception:
            return name  # assuming it is referenced in the Keras metrics
    else:
        if name in metrics_func:
            return metrics_func[name]
        else:
            return metrics_obj[name]()
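
A hedged usage sketch (the metric names below are placeholders, not necessarily keys of `metrics_func` or `metrics_obj`), exercising each branch of the lookup above:

# Callables are returned unchanged.
metric = selectMetric(lambda y_true, y_pred: abs(y_true - y_pred))

# A plain name is looked up in the DeepHyper dictionaries first; otherwise it is
# returned as-is, assuming Keras knows it.
metric = selectMetric("mse")

# A dotted import path is resolved with load_attr
# (the string itself is returned if the import fails).
metric = selectMetric("mypackage.metrics.my_metric")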
Code example #4
File: _hps.py Project: felker/deephyper
def build_parser_from(cls):
    """
    :meta private:
    """
    parser = argparse.ArgumentParser(conflict_handler="resolve")

    # add the arguments of a specific search
    add_arguments_from_signature(parser, cls)

    # add argument of Search.search interface
    parser.add_argument(
        "--max-evals",
        default=-1,
        type=int,
        help="Type[int]. Defaults to '-1' when a number of evaluations is not imposed.",
    )
    parser.add_argument(
        "--timeout",
        default=None,
        type=int,
        help="Type[int]. Number of seconds before killing the search. Defaults to 'None' when a time budget is not imposed.",
    )

    # add arguments for evaluators
    evaluator_added_arguments = add_arguments_from_signature(parser, Evaluator)

    for eval_name, eval_cls in EVALUATORS.items():
        try:
            eval_cls = load_attr(f"deephyper.evaluator.{eval_cls}")
            add_arguments_from_signature(parser,
                                         eval_cls,
                                         prefix=eval_name,
                                         exclude=evaluator_added_arguments)
        except ModuleNotFoundError:  # some evaluators are optional
            pass

    return parser
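
A hedged usage sketch, assuming `SomeSearch` stands for any class exposed through `HPS_SEARCHES`:

# Hypothetical: SomeSearch is one of the search classes listed in HPS_SEARCHES.
parser = build_parser_from(SomeSearch)

# The parser carries the search's own options, the evaluator options, and the
# shared flags; with no arguments the shared flags take their defaults:
args = parser.parse_args([])  # args.max_evals == -1, args.timeout is None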
Code example #5
File: losses.py Project: felker/deephyper
def selectLoss(name: str):
    """Return the loss defined by name.

    Args:
        name (str): a string referenced in DeepHyper, one referenced in Keras, or an attribute name to import.

    Returns:
        str or callable: a string, assuming it is referenced in the Keras framework, or a callable taking ``(y_true, y_pred)`` as inputs and returning a tensor.
    """
    if callable(name):
        return name
    if losses_func.get(name) is None and losses_obj.get(name) is None:
        try:
            return load_attr(name)
        except Exception:
            # assuming it is referenced in the Keras losses
            return tf.keras.losses.get(name)
    else:
        if name in losses_func:
            return losses_func[name]
        else:
            return losses_obj[name]()
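
A hedged usage sketch, mirroring `selectMetric` above (loss names are placeholders); the main behavioral difference is the fallback, which goes through `tf.keras.losses.get` instead of returning the raw string:

# A plain name is looked up in the DeepHyper dictionaries first; otherwise it is
# resolved with tf.keras.losses.get.
loss = selectLoss("binary_crossentropy")

# A dotted import path is resolved with load_attr
# (with a fallback to tf.keras.losses.get if the import fails).
loss = selectLoss("mypackage.losses.my_loss")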
Code example #6
File: _hps.py Project: felker/deephyper
def main(**kwargs):
    """
    :meta private:
    """

    sys.path.insert(0, ".")

    if kwargs["verbose"]:
        logging.basicConfig(filename="deephyper.log", level=logging.INFO)

    search_name = sys.argv[2]

    # load search class
    logging.info(f"Loading the search '{search_name}'...")
    search_cls = load_attr(HPS_SEARCHES[search_name])

    # load problem
    logging.info("Loading the problem...")
    problem = load_attr(kwargs.pop("problem"))

    # load run function
    logging.info("Loading the run-function...")
    run_function = load_attr(kwargs.pop("run_function"))

    # filter arguments from evaluator class signature
    logging.info("Loading the evaluator...")
    evaluator_method = kwargs.pop("evaluator")
    base_arguments = ["num_workers", "callbacks"]
    evaluator_kwargs = {k: kwargs.pop(k) for k in base_arguments}

    # remove the arguments from unused evaluator
    for method in EVALUATORS.keys():
        evaluator_method_kwargs = {
            k[len(evaluator_method) + 1:]: kwargs.pop(k)
            for k in kwargs.copy() if method in k
        }
        if method == evaluator_method:
            evaluator_kwargs = {**evaluator_kwargs, **evaluator_method_kwargs}

    # create evaluator
    logging.info(
        f"Evaluator(method={evaluator_method}, method_kwargs={evaluator_kwargs})"
    )
    evaluator = Evaluator.create(run_function,
                                 method=evaluator_method,
                                 method_kwargs=evaluator_kwargs)
    logging.info(f"Evaluator has {evaluator.num_workers} workers available.")

    # filter arguments from search class signature
    # remove keys in evaluator_kwargs
    kwargs = {k: v for k, v in kwargs.items() if k not in evaluator_kwargs}
    max_evals = kwargs.pop("max_evals")
    timeout = kwargs.pop("timeout")

    # TODO: How about checkpointing and transfer learning?

    # execute the search
    # remaining kwargs are for the search
    logging.info(f"Evaluator has {evaluator.num_workers} workers available.")
    search = search_cls(problem, evaluator, **kwargs)

    search.search(max_evals=max_evals, timeout=timeout)
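
For context, a hedged sketch of the command-line shape this entry point assumes; every name below is a placeholder, not a verified deephyper command line:

# Hypothetical invocation shape (placeholders only):
#
#   deephyper hps <search-name> --problem <module.attr of the problem> \
#       --run-function <module.attr of the run function> --max-evals 100
#
# sys.argv[2] is then "<search-name>", and the dotted strings given to --problem
# and --run-function are resolved with load_attr before the search starts.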