def loads(data, fix_imports=True, encoding="ASCII", errors="strict", buffers=None):
    """Robust pickle loading.

    We first try to unpickle the object with pd.read_pickle. This makes no
    difference for non-pandas objects but makes the de-serialization of pandas
    objects more robust across pandas versions. If that fails, we use cloudpickle.
    If that fails, we return None but do not raise an error.

    See: https://github.com/pandas-dev/pandas/issues/16474

    """
    try:
        res = pd.read_pickle(io.BytesIO(data), compression=None)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        try:
            res = cloudpickle.loads(data)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            res = None
            tb = get_traceback()
            warnings.warn(
                f"Unable to read PickleType column from database:\n{tb}\n "
                "The entry was replaced by None."
            )

    return res
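
# Illustrative sketch (not part of the original module): shows the fallback
# behavior of `loads`, assuming the module-level imports and helpers used above
# (pd, io, cloudpickle, warnings, get_traceback) are available. Wrapped in a
# function so that nothing runs at import time.
def _example_loads_fallback():
    import pickle

    import pandas as pd

    intact = pickle.dumps(pd.Series([1.0, 2.0]))
    assert loads(intact).equals(pd.Series([1.0, 2.0]))  # pd.read_pickle path

    plain = pickle.dumps({"a": 1})
    assert loads(plain) == {"a": 1}  # non-pandas objects round-trip as well

    assert loads(b"not a pickle") is None  # emits a warning and falls back to None
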
def wrapper_catch(*args, **kwargs):
    try:
        res = func(*args, **kwargs)
    except exclude:
        raise
    except exception as e:
        if onerror is not None:
            onerror(e)
        if reraise:
            raise e
        tb = get_traceback()
        if warn:
            msg = f"The following exception was caught:\n\n{tb}"
            warnings.warn(msg)
        if default == "__traceback__":
            res = tb
        elif callable(default):
            res = default(*args, **kwargs)
        else:
            res = default
    return res
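
# Hypothetical sketch (not part of the original module): wrapper_catch above is
# the inner function of a decorator factory that closes over func, exception,
# exclude, onerror, default, warn and reraise. The factory below mirrors that
# closure structure; names and the use of traceback.format_exc instead of
# get_traceback are illustrative, not the library's actual API.
def _example_catch(
    *,
    exception=Exception,
    exclude=(KeyboardInterrupt, SystemExit),
    onerror=None,
    default=None,
    warn=True,
    reraise=False,
):
    import functools
    import traceback
    import warnings

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                res = func(*args, **kwargs)
            except exclude:
                raise
            except exception as e:
                if onerror is not None:
                    onerror(e)
                if reraise:
                    raise e
                tb = traceback.format_exc()
                if warn:
                    warnings.warn(f"The following exception was caught:\n\n{tb}")
                if default == "__traceback__":
                    res = tb
                elif callable(default):
                    res = default(*args, **kwargs)
                else:
                    res = default
            return res

        return wrapper

    return decorator


# Usage sketch: replace a failing evaluation with NaN instead of aborting.
@_example_catch(default=float("nan"))
def _example_fragile(x):
    # _example_fragile(0.0) -> nan plus a warning instead of ZeroDivisionError.
    return 1 / x
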
def internal_criterion_and_derivative_template(
    x,
    *,
    task,
    direction,
    criterion,
    params,
    reparametrize_from_internal,
    convert_derivative,
    algorithm_info,
    derivative,
    criterion_and_derivative,
    numdiff_options,
    logging,
    db_kwargs,
    error_handling,
    error_penalty,
    first_criterion_evaluation,
    cache,
    cache_size,
    fixed_log_data,
):
    """Template for the internal criterion and derivative function.

    The internal criterion and derivative function only has the arguments x, task and
    algorithm_info. The other arguments will be partialed in by estimagic at some
    point. Algorithm_info and possibly even task will be partialed in by the
    algorithm. That is the reason why this function is called a template.

    Args:
        x (np.ndarray): 1d numpy array with internal parameters.
        task (str): One of "criterion", "derivative" and "criterion_and_derivative".
        direction (str): One of "maximize" or "minimize".
        criterion (callable): (partialed) user provided criterion function that takes a
            parameter dataframe as its only argument and returns a scalar, an array
            like object or a dictionary. See :ref:`criterion`.
        params (pd.DataFrame): See :ref:`params`.
        reparametrize_from_internal (callable): Function that takes x and returns a
            numpy array with the values of the external parameters.
        convert_derivative (callable): Function that takes the derivative of criterion
            at the external version of x and x and returns the derivative of the
            internal criterion.
        algorithm_info (dict): Dict with the following entries:
            "primary_criterion_entry": One of "value", "contributions",
                "root_contributions" or "dict".
            "parallelizes": Bool that indicates if the algorithm calls the internal
                criterion function in parallel. If so, caching is disabled.
            "needs_scaling": bool
            "name": string
        derivative (callable, optional): (partialed) user provided function that
            calculates the first derivative of criterion. For most algorithms, this is
            the gradient of the scalar output (or "value" entry of the dict). However,
            some algorithms (e.g. bhhh) require the jacobian of the "contributions"
            entry of the dict. You will get an error if you provide the wrong type of
            derivative.
        criterion_and_derivative (callable): Function that returns criterion and
            derivative as a tuple. This can be used to exploit synergies in the
            evaluation of both functions. The first element of the tuple has to be
            exactly the same as the output of criterion. The second has to be exactly
            the same as the output of derivative.
        numdiff_options (dict): Keyword arguments for the calculation of numerical
            derivatives. See :ref:`first_derivative` for details. Note that the
            default method is changed to "forward" for speed reasons.
        logging (bool): Whether logging is used.
        db_kwargs (dict): Dictionary with entries "database", "path" and
            "fast_logging".
        error_handling (str): Either "raise" or "continue". Note that "continue" does
            not absolutely guarantee that no error is raised, but we try to handle as
            many errors as possible in that case without aborting the optimization.
        error_penalty (dict): Dict with the entries "constant" (float) and "slope"
            (float). If the criterion or derivative raise an error and error_handling
            is "continue", return ``constant + slope * norm(params - start_params)``
            as criterion value, where ``norm`` is the euclidean distance, and adjust
            the derivative accordingly. This is meant to guide the optimizer back into
            a valid region of parameter space (in direction of the start parameters).
            Note that the constant has to be high enough to ensure that the penalty is
            actually a bad function value. The default constant is 2 times the
            criterion value at the start parameters. The default slope is 0.1.
        first_criterion_evaluation (dict): Dictionary with entries "internal_params",
            "external_params" and "output".
        cache (dict): Dictionary used as cache for criterion and derivative
            evaluations.
        cache_size (int): Number of evaluations that are kept in cache. Default 10.
        fixed_log_data (dict): Dictionary with fixed data to be saved in the database.
            Has the entries "stage" (str) and "substage" (int).

    Returns:
        float, np.ndarray or tuple: If task=="criterion" it returns the output of
            criterion, which can be a float or 1d numpy array. If task=="derivative"
            it returns the first derivative of criterion, which is a numpy array. If
            task=="criterion_and_derivative" it returns both as a tuple.

    """
    if algorithm_info["primary_criterion_entry"] == "root_contributions":
        if direction == "maximize":
            msg = (
                "Optimizers that exploit a least squares structure like {} can only "
                "be used for minimization."
            )
            raise ValueError(msg.format(algorithm_info["name"]))

    x_hash = hash_array(x)
    cache_entry = cache.get(x_hash, {})

    to_dos = _determine_to_dos(task, cache_entry, derivative, criterion_and_derivative)

    caught_exceptions = []
    new_criterion, new_derivative, new_external_derivative = None, None, None
    current_params = params.copy()
    external_x = reparametrize_from_internal(x)
    current_params["value"] = external_x

    if to_dos == []:
        pass
    elif "numerical_criterion_and_derivative" in to_dos:

        def func(x):
            external_x = reparametrize_from_internal(x)
            p = params.copy()
            p["value"] = external_x
            return criterion(p)

        options = numdiff_options.copy()
        options["key"] = algorithm_info["primary_criterion_entry"]
        options["f0"] = cache_entry.get("criterion", None)
        options["return_func_value"] = True

        try:
            derivative_dict = first_derivative(func, x, **options)
            new_derivative = {
                algorithm_info["primary_criterion_entry"]: derivative_dict[
                    "derivative"
                ]
            }
            new_criterion = derivative_dict["func_value"]
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            caught_exceptions.append(get_traceback())
            if "criterion" in cache_entry:
                raise Exception(DERIVATIVE_ERROR_MESSAGE) from e

    elif "criterion_and_derivative" in to_dos:
        try:
            new_criterion, new_external_derivative = criterion_and_derivative(
                current_params
            )
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            caught_exceptions.append(get_traceback())
            if "criterion" in cache_entry:
                raise Exception(DERIVATIVE_ERROR_MESSAGE) from e

    else:
        if "criterion" in to_dos:
            try:
                new_criterion = criterion(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                caught_exceptions.append(get_traceback())
                if "derivative" in cache_entry:
                    raise Exception(CRITERION_ERROR_MESSAGE) from e

        if "derivative" in to_dos:
            try:
                new_external_derivative = derivative(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                caught_exceptions.append(get_traceback())
                if "criterion" in cache_entry:
                    raise Exception(DERIVATIVE_ERROR_MESSAGE) from e

    if new_derivative is None and new_external_derivative is not None:
        if not isinstance(new_external_derivative, dict):
            new_external_derivative = {
                algorithm_info["primary_criterion_entry"]: new_external_derivative
            }

        new_derivative = {
            k: convert_derivative(v, internal_values=x)
            for k, v in new_external_derivative.items()
        }

    if caught_exceptions:
        if error_handling == "continue":
            new_criterion, new_derivative = _penalty_and_derivative(
                x, first_criterion_evaluation, error_penalty, algorithm_info
            )
            warnings.warn("\n\n".join(caught_exceptions))
        else:
            raise Exception("\n\n".join(caught_exceptions))

    if not algorithm_info["parallelizes"] and cache_size >= 1:
        _cache_new_evaluations(new_criterion, new_derivative, x_hash, cache, cache_size)

    new_criterion = _check_and_harmonize_criterion_output(
        cache_entry.get("criterion", new_criterion), algorithm_info
    )

    new_derivative = _check_and_harmonize_derivative(
        cache_entry.get("derivative", new_derivative), algorithm_info
    )

    if (new_criterion is not None or new_derivative is not None) and logging:
        _log_new_evaluations(
            new_criterion=new_criterion,
            new_derivative=new_derivative,
            external_x=external_x,
            caught_exceptions=caught_exceptions,
            db_kwargs=db_kwargs,
            fixed_log_data=fixed_log_data,
        )

    res = _get_output_for_optimizer(
        new_criterion, new_derivative, task, algorithm_info, direction
    )

    return res
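
# Illustrative sketch (not part of the original module): the penalty described
# in the error_penalty docstring above, specialized to a scalar criterion value.
# The real _penalty_and_derivative also handles the "contributions" and
# "root_contributions" cases and takes different arguments.
def _sketch_penalty_and_derivative(x, start_x, constant, slope):
    import numpy as np

    diff = x - start_x
    dist = np.linalg.norm(diff)
    # Penalized criterion value: constant + slope * ||x - start_x||.
    penalty = constant + slope * dist
    # Gradient of that expression; defined as zero at the start parameters to
    # avoid dividing by zero.
    gradient = slope * diff / dist if dist > 0 else np.zeros_like(x)
    return penalty, gradient


# With the documented defaults, constant would be 2 * criterion(start_params)
# and slope would be 0.1.
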
def internal_criterion_and_derivative_template(
    x,
    *,
    task,
    direction,
    criterion,
    converter,
    algo_info,
    derivative,
    criterion_and_derivative,
    numdiff_options,
    logging,
    db_kwargs,
    error_handling,
    error_penalty_func,
    fixed_log_data,
    history_container=None,
    return_history_entry=False,
):
    """Template for the internal criterion and derivative function.

    This function forms the basis of all functions that define the optimization
    problem and are passed to the internal optimizers in estimagic, i.e. the
    criterion, derivative and criterion_and_derivative functions. Most of the
    arguments of this function will be partialled in before the functions are passed
    to internal optimizers. That is the reason why this function is called a template.

    Args:
        x (np.ndarray): 1d numpy array with internal parameters.
        task (str): One of "criterion", "derivative" and "criterion_and_derivative".
        direction (str): One of "maximize" or "minimize".
        criterion (callable): (partialed) user provided criterion function that takes a
            parameter dataframe as its only argument and returns a scalar, an array
            like object or a dictionary. See :ref:`criterion`.
        converter (Converter): NamedTuple with methods to convert between internal and
            external derivatives, parameters and criterion outputs.
        algo_info (AlgoInfo): NamedTuple with the attributes:
            - primary_criterion_entry
            - name
            - parallelizes
            - needs_scaling
            - is_available
        derivative (callable, optional): (partialed) user provided function that
            calculates the first derivative of criterion. For most algorithms, this is
            the gradient of the scalar output (or "value" entry of the dict). However,
            some algorithms (e.g. bhhh) require the jacobian of the "contributions"
            entry of the dict. You will get an error if you provide the wrong type of
            derivative.
        criterion_and_derivative (callable): Function that returns criterion and
            derivative as a tuple. This can be used to exploit synergies in the
            evaluation of both functions. The first element of the tuple has to be
            exactly the same as the output of criterion. The second has to be exactly
            the same as the output of derivative.
        numdiff_options (dict): Keyword arguments for the calculation of numerical
            derivatives. See :ref:`first_derivative` for details. Note that the
            default method is changed to "forward" for speed reasons.
        logging (bool): Whether logging is used.
        db_kwargs (dict): Dictionary with entries "database", "path" and
            "fast_logging".
        error_handling (str): Either "raise" or "continue". Note that "continue" does
            not absolutely guarantee that no error is raised, but we try to handle as
            many errors as possible in that case without aborting the optimization.
        error_penalty_func (callable): Function that takes ``x`` and ``task`` and
            returns a penalized criterion function, its derivative or both (depending
            on task).
        fixed_log_data (dict): Dictionary with fixed data to be saved in the database.
            Has the entries "stage" (str) and "substage" (int).
        history_container (list or None): List to which parameter, criterion and
            derivative histories are appended. Should be set to None if an algorithm
            parallelizes over criterion or derivative evaluations.
        return_history_entry (bool): Whether the history entry should be returned.

    Returns:
        float, np.ndarray or tuple: If task=="criterion" it returns the output of
            criterion, which can be a float or 1d numpy array. If task=="derivative"
            it returns the first derivative of criterion, which is a numpy array. If
            task=="criterion_and_derivative" it returns both as a tuple.

    """
    now = time.perf_counter()

    to_dos = _determine_to_dos(task, derivative, criterion_and_derivative)

    caught_exceptions = []
    new_criterion, new_external_criterion = None, None
    new_derivative, new_external_derivative = None, None
    current_params, external_x = converter.params_from_internal(
        x,
        return_type="tree_and_flat",
    )

    if to_dos == []:
        pass
    elif "numerical_criterion_and_derivative" in to_dos:

        def func(x):
            p = converter.params_from_internal(x, "tree")
            crit_full = criterion(p)
            crit_relevant = converter.func_to_internal(crit_full)
            out = {"full": crit_full, "relevant": crit_relevant}
            return out

        options = numdiff_options.copy()
        options["key"] = "relevant"
        options["return_func_value"] = True

        try:
            derivative_dict = first_derivative(func, x, **options)
            new_derivative = derivative_dict["derivative"]
            new_criterion = derivative_dict["func_value"]["relevant"]
            new_external_criterion = derivative_dict["func_value"]["full"]
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            tb = get_traceback()
            caught_exceptions.append(tb)
            if error_handling == "raise":
                msg = (
                    "An error occurred when evaluating criterion to calculate a "
                    "numerical derivative during optimization."
                )
                raise UserFunctionRuntimeError(msg) from e
            else:
                msg = (
                    "The following exception was caught when evaluating criterion to "
                    f"calculate a numerical derivative during optimization:\n\n{tb}"
                )
                warnings.warn(msg)

    elif "criterion_and_derivative" in to_dos:
        try:
            new_external_criterion, new_external_derivative = criterion_and_derivative(
                current_params
            )
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as e:
            tb = get_traceback()
            caught_exceptions.append(tb)
            if error_handling == "raise":
                msg = (
                    "An error occurred when evaluating criterion_and_derivative "
                    "during optimization."
                )
                raise UserFunctionRuntimeError(msg) from e
            else:
                msg = (
                    "The following exception was caught when evaluating "
                    f"criterion_and_derivative during optimization:\n\n{tb}"
                )
                warnings.warn(msg)

    else:
        if "criterion" in to_dos:
            try:
                new_external_criterion = criterion(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                tb = get_traceback()
                caught_exceptions.append(tb)
                if error_handling == "raise":
                    msg = (
                        "An error occurred when evaluating criterion during "
                        "optimization."
                    )
                    raise UserFunctionRuntimeError(msg) from e
                else:
                    msg = (
                        "The following exception was caught when evaluating "
                        f"criterion during optimization:\n\n{tb}"
                    )
                    warnings.warn(msg)

        if "derivative" in to_dos:
            try:
                new_external_derivative = derivative(current_params)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                tb = get_traceback()
                caught_exceptions.append(tb)
                if error_handling == "raise":
                    msg = (
                        "An error occurred when evaluating derivative during "
                        "optimization."
                    )
                    raise UserFunctionRuntimeError(msg) from e
                else:
                    msg = (
                        "The following exception was caught when evaluating "
                        f"derivative during optimization:\n\n{tb}"
                    )
                    warnings.warn(msg)

    if new_external_criterion is not None and new_criterion is None:
        new_criterion = converter.func_to_internal(new_external_criterion)

    if new_external_derivative is not None and new_derivative is None:
        new_derivative = converter.derivative_to_internal(new_external_derivative, x)

    if caught_exceptions:
        new_criterion, new_derivative = error_penalty_func(
            x, task="criterion_and_derivative"
        )

    if new_criterion is not None:
        scalar_critval = aggregate_func_output_to_value(
            f_eval=new_criterion,
            primary_key=algo_info.primary_criterion_entry,
        )
    else:
        scalar_critval = None

    if (new_criterion is not None or new_derivative is not None) and logging:
        _log_new_evaluations(
            new_criterion=new_external_criterion,
            new_derivative=new_derivative,
            external_x=external_x,
            caught_exceptions=caught_exceptions,
            db_kwargs=db_kwargs,
            fixed_log_data=fixed_log_data,
            scalar_value=scalar_critval,
            now=now,
        )

    res = _get_output_for_optimizer(
        new_criterion=new_criterion,
        new_derivative=new_derivative,
        task=task,
        direction=direction,
    )

    if new_criterion is not None:
        hist_entry = {
            "params": current_params,
            "criterion": scalar_critval,
            "runtime": now,
        }
    else:
        hist_entry = None

    if history_container is not None and new_criterion is not None:
        history_container.append(hist_entry)

    if return_history_entry:
        res = (res, hist_entry)

    return res
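
# Illustrative sketch (not part of the original module): as the docstring above
# explains, everything except ``x`` is partialled in before the template is
# handed to an internal optimizer, so the optimizer only ever sees a function
# of ``x``. The stand-in below mimics that pattern with a trivial "template";
# it does not use the real converter, algo_info or logging machinery.
def _example_partialling_pattern():
    import functools

    import numpy as np

    def tiny_template(x, *, task, direction):
        # Stand-in for internal_criterion_and_derivative_template.
        value = float(np.sum(x**2))
        return value if direction == "minimize" else -value

    internal_criterion = functools.partial(
        tiny_template, task="criterion", direction="minimize"
    )

    # The internal optimizer would now call internal_criterion(x) repeatedly.
    assert internal_criterion(np.array([1.0, 2.0])) == 5.0
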