    def _update_current_hyperparameters(self):
        """Update :attr:`current_init_params` and :attr:`current_extra_params` according to the
        upcoming set of hyperparameters to be searched"""
        current_hyperparameters = self._get_current_hyperparameters().items()
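        # NOTE: Keys in the searched hyperparameters are tuple paths whose first element names the
        #   parameter group -- illustrative shape only (values hypothetical):
        #   {("model_init_params", "layers", 0, "units"): 64,
        #    ("model_extra_params", "fit", "batch_size"): 32}
        #   `_k[0]` below selects the group; `_k[1:]` keeps the remaining path within that group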

        init_params = {
            _k[1:]: _v for _k, _v in current_hyperparameters if _k[0] == "model_init_params"
        }
        extra_params = {
            _k[1:]: _v for _k, _v in current_hyperparameters if _k[0] == "model_extra_params"
        }
        # TODO: Replace above two with `general_utils.subdict` call that modifies key to a slice

        # FLAG: At this point, `dummy_layers` shows "kernel_initializer" as `orthogonal` instance with "__hh" attrs
        # FLAG: HOWEVER, the `orthogonal` instance does have `gain` set to the correct dummy value, ...
        # FLAG: ... so it might be ok, as long as experiment matching can still work with that

        self.current_init_params = deep_restricted_update(
            self.model_init_params, init_params, iter_attrs=self.init_iter_attrs
        )
        self.current_extra_params = deep_restricted_update(
            self.model_extra_params, extra_params, iter_attrs=self.extra_iter_attrs
        )

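        # NOTE: Keras callbacks are stateful objects, so reusing one instance across successive
        #   builds could leak state between candidates; `reinitialize_callbacks` presumably
        #   rebuilds each callback fresh from its recorded parameters before the next execution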
        if (self.module_name == "keras") and ("callbacks" in self.current_extra_params):
            self.current_extra_params["callbacks"] = reinitialize_callbacks(
                self.current_extra_params["callbacks"]
            )
def keras_prep_workflow(model_initializer, build_fn, extra_params, source_script):
    """Conduct preparation steps necessary before hyperparameter optimization on a `Keras` model.
    Such steps include parsing and modifying `build_fn` to be of the form used by
    :class:`hyperparameter_hunter.optimization.protocol_core.BaseOptPro`, compiling a dummy model to
    identify universal locations of given hyperparameter choices, and creating a simplified
    characterization of the models to be built during optimization, in order to enable collection
    of similar Experiments

    Parameters
    ----------
    model_initializer: :class:`keras.wrappers.scikit_learn.<KerasClassifier; KerasRegressor>`
        A descendant of :class:`keras.wrappers.scikit_learn.BaseWrapper` used to build a Keras model
    build_fn: Callable
        The `build_fn` value provided to :meth:`keras.wrappers.scikit_learn.BaseWrapper.__init__`.
        Expected to return a compiled Keras model. May contain hyperparameter space choices
    extra_params: Dict
        The parameters expected to be passed to the extra methods of the compiled Keras model. Such
        methods include (but are not limited to) `fit`, `predict`, and `predict_proba`. Some of the
        common parameters given here include `epochs`, `batch_size`, and `callbacks`
    source_script: Str
        Absolute path to a Python file. Should end with one of the following extensions: ".py",
        ".ipynb"

    Returns
    -------
    reusable_build_fn: Callable
        Modified `build_fn` in which hyperparameter space choices are replaced by dict lookups,
        the signature is given a standard name, and the additional input parameters necessary
        for reuse are added
    reusable_wrapper_params: Dict
        The parameters expected to be passed to the extra methods of the compiled Keras model. Such
        methods include (but are not limited to) `fit`, `predict`, and `predict_proba`. Some of the
        common parameters given here include `epochs`, `batch_size`, and `callbacks`
    dummy_layers: List
        The layers of a compiled dummy Keras model constructed according to the given
        hyperparameters, in which each layer is a dict containing at least the following: the name
        of the layer class, allowed and used args, and default and used kwargs
    dummy_compile_params: Dict
        The parameters used on the `compile` call for the dummy model. If a parameter is accepted
        by the `compile` method, but is not explicitly given, its default value is included in
        `dummy_compile_params`"""
    #################### Set Temporary Model-Builder Module Location ####################
    temp_module_name = f"__temp_model_builder_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S-%f')}"
    temp_module_dot_path = f"{TEMP_MODULES_DOT_PATH}.{temp_module_name}"
    temp_module_filepath = f"{TEMP_MODULES_DIR_PATH}/{temp_module_name}.py"
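    # NOTE: The module name is timestamped down to the microsecond so that repeated (or
    #   concurrent) optimization runs each write and import a distinct temporary module, rather
    #   than colliding on a single file or hitting a stale `sys.modules` entry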

    #################### Prepare Model-Builder String ####################
    reusable_build_fn, expected_params = rewrite_model_builder(stringify_model_builder(build_fn))
    temp_module_str = build_temp_model_file(reusable_build_fn, source_script)

    #################### Save and Import Temporary Model Builder ####################
    write_python(temp_module_str, temp_module_filepath)

    if temp_module_name in sys.modules:
        del sys.modules[temp_module_name]

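    # Import the freshly written module directly from its filepath; `spec_from_file_location` /
    #   `module_from_spec` / `exec_module` is the standard `importlib.util` recipe for loading a
    #   module by path without modifying `sys.path`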
    temp_module_spec = spec_from_file_location(temp_module_dot_path, temp_module_filepath)
    temp_module = module_from_spec(temp_module_spec)
    temp_module_spec.loader.exec_module(temp_module)
    temp_build_fn = temp_module.build_fn

    #################### Translate Hyperparameter Names to Universal Paths ####################
    wrapper_params = dict(params={k: eval(v) for k, v in expected_params.items()}, **extra_params)
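    # NOTE: `expected_params` values are source strings for the space choices extracted from
    #   `build_fn`, so the `eval` above rebuilds the actual choice objects in this context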
    # TODO: Intercept space choices that use callables (like `Categorical([glorot_normal(), orthogonal()])`)
    # TODO: Can't deal with them yet, due to imports unavailable in this context. Raise exception
    wrapper_params, dummified_params = check_dummy_params(wrapper_params)

    if ("optimizer_params" in dummified_params) and ("optimizer" in dummified_params):
        raise ValueError("Can't optimize `optimizer` with `optimizer_params`. Try them separately")

    compiled_dummy = initialize_dummy_model(model_initializer, temp_build_fn, wrapper_params)
    dummy_layers, dummy_compile_params = parameterize_compiled_keras_model(compiled_dummy)
    merged_compile_params = merge_compile_params(dummy_compile_params, dummified_params)
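    # NOTE: This merge presumably re-inserts the dummified space choices into the compile params
    #   recorded from the dummy model, so `merged_compile_params` reflects the original search
    #   space rather than the dummy stand-in values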
    # FLAG: Will need to deal with capitalization conflicts when comparing similar experiments: `optimizer`="Adam" vs "adam"

    consolidated_layers = consolidate_layers(dummy_layers, class_name_key=False, split_args=False)
    wrapper_params = deep_restricted_update(wrapper_params, dummified_params)

    return (temp_build_fn, wrapper_params, consolidated_layers, merged_compile_params)
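

# Illustrative usage sketch (hedged): `my_build_fn` and the space choices below are hypothetical,
# and assume `Integer`/`Categorical` are imported from HyperparameterHunter's space definitions:
#
#     from keras.wrappers.scikit_learn import KerasClassifier
#
#     def my_build_fn(input_shape=(10,)):
#         ...  # builds/compiles a Keras model; may contain choices like Dense(Integer(32, 128))
#         return model
#
#     temp_build_fn, wrapper_params, layers, compile_params = keras_prep_workflow(
#         KerasClassifier, my_build_fn, dict(epochs=Categorical([5, 10]), batch_size=32), __file__
#     )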