Example #1
0
 def run(self, plan: goos.OptimizationPlan) -> None:
     """Resolve the stored value and write it into this action's variable.

     If the stored value is itself a `Function` node, it is first
     evaluated through the plan to a concrete array; otherwise the raw
     value is used directly.
     """
     new_value = self._value
     if isinstance(new_value, Function):
         # Evaluate the graph node down to its numeric array.
         new_value = plan.eval_node(new_value).array
     # Parameter variables are writable even while frozen, so the frozen
     # check is skipped for them.
     plan.set_var_value(self._var,
                        new_value,
                        check_frozen=not self._var._is_param)
Example #2
0
    def run(self, plan: goos.OptimizationPlan) -> None:
        """Log a plain message or the evaluated values of one or more nodes.

        A string is logged verbatim. A single `goos.Function` is wrapped
        in a list; any other object is assumed to already be a sequence
        of nodes. Each node is evaluated and logged as "name: value".
        """
        if isinstance(self._obj, str):
            # Plain text message: log and stop.
            plan.logger.info(self._obj)
            return

        if isinstance(self._obj, goos.Function):
            node_list = [self._obj]
        else:
            node_list = self._obj

        for node, result in zip(node_list, plan.eval_nodes(node_list)):
            plan.logger.info("{}: {}".format(node._goos_name, result))
Example #3
0
    def run(self, plan: goos.OptimizationPlan, start_iter: int = 0):
        """Minimize the objective with `scipy.optimize.minimize`.

        All thawed plan variables are flattened and concatenated into a
        single 1D parameter vector. The objective, gradient, and
        constraint callbacks unpack that vector back into per-variable
        arrays, push them into the plan, and evaluate the corresponding
        graph nodes.

        Bug fix: the constraint callbacks previously closed over the
        loop variables `eq`/`ineq` directly. Because scipy calls these
        functions only after the loops have finished, Python's
        late-binding closure semantics made every constraint evaluate
        the *last* node of its loop. The node is now bound via a
        default argument at definition time.

        Args:
            plan: Optimization plan used to evaluate nodes and to read
                and write variable values.
            start_iter: Iteration to resume from; subtracted from the
                `maxiter` option so the total iteration budget is kept.
        """
        variables = plan.get_thawed_vars()

        # Collect per-variable shapes, flattened initial values, and
        # per-element scipy-style bounds.
        var_shapes = []
        initial_val = []
        bounds = []
        for var in variables:
            value = plan.get_var_value(var)
            if value.shape:
                var_shapes.append(value.shape)
            else:
                # Scalars are treated as length-1 arrays.
                var_shapes.append([1])
            initial_val.append(value.flatten())

            bound = plan.get_var_bounds(var)
            for lower, upper in zip(bound[0].flatten(), bound[1].flatten()):
                # scipy expects `None` (not +/-inf) for unbounded sides.
                if lower == -np.inf:
                    lower = None
                if upper == np.inf:
                    upper = None
                bounds.append((lower, upper))

        # TODO(logansu): Currently we call optimize with every single variable
        # in the plan, but we can really reduce the number of elements by
        # focusing only the variables that are required to compute the objective
        # function.
        def unpack(x):
            """Splits the flat vector `x` into per-variable shaped arrays."""
            cur_ind = 0
            values = []
            for shape in var_shapes:
                size = np.prod(shape)
                values.append(np.reshape(x[cur_ind:cur_ind + size], shape))
                cur_ind += size
            return values

        def unpack_and_set(x):
            """Writes the flat vector `x` back into the plan's variables."""
            for var, value in zip(variables, unpack(x)):
                plan.set_var_value(var, value)

        def func(x):
            """Objective value at `x`."""
            unpack_and_set(x)
            val = plan.eval_node(self._obj).array
            plan.logger.debug("Function evaluated: %f", val)
            return val

        def grad(x):
            """Flattened objective gradient at `x`."""
            unpack_and_set(x)
            grad_flows = plan.eval_grad(self._obj, variables)
            val = np.hstack([flow.array_grad.flatten() for flow in grad_flows])
            plan.logger.debug("Gradient evaluated, norm: %f",
                              np.linalg.norm(val))
            return val

        def flatten_cons_grad(grad_flows):
            """Flattens constraint gradients into one Jacobian block.

            Only the trailing dimensions corresponding to each variable
            are flattened; any leading (constraint-output) dimensions
            are preserved.
            """
            val = []
            for flow, var_shape in zip(grad_flows, var_shapes):
                arr = flow.array_grad
                new_shape = arr.shape[:-len(var_shape)] + (
                    np.prod(var_shape), )
                val.append(np.reshape(arr, new_shape))
            return np.hstack(val)

        # To avoid scipy warning, only pass Jacobian to methods that need it.
        methods_that_need_jacobian = {
            "CG", "BFGS", "L-BFGS-B", "TNC", "SLSQP", "dogleg", "trust-ncg"
        }
        jac = None
        if self._method in methods_that_need_jacobian:
            jac = grad

        # Handle constraints.
        methods_that_accept_constraints = {'SLSQP', 'COBYLA'}
        constraints = None
        if self._method in methods_that_accept_constraints:
            constraints = []
            for eq in self._cons_eq:
                # `eq=eq` binds the current node at definition time.
                # Closing over the loop variable directly would make
                # every constraint evaluate only the last node.
                def cons_fun(x, eq=eq):
                    unpack_and_set(x)
                    val = plan.eval_node(eq).array
                    plan.logger.debug("Eq. cons. function evaluated: %f", val)
                    return val

                def cons_jac(x, eq=eq):
                    unpack_and_set(x)
                    val = flatten_cons_grad(plan.eval_grad(eq, variables))
                    plan.logger.debug("Eq. cons. gradient evaluated, norm: %f",
                                      np.linalg.norm(val))
                    return val

                constraints.append({
                    "type": "eq",
                    "fun": cons_fun,
                    "jac": cons_jac
                })

            for ineq in self._cons_ineq:
                # Note the negative sign because of opposite convention
                # for inequalities (f >= 0 vs f <= 0).
                # `ineq=ineq` again avoids the late-binding closure bug.
                def cons_fun(x, ineq=ineq):
                    unpack_and_set(x)
                    val = plan.eval_node(ineq).array
                    plan.logger.debug("Ineq. cons. function evaluated: %f",
                                      val)
                    return -val

                def cons_jac(x, ineq=ineq):
                    unpack_and_set(x)
                    val = flatten_cons_grad(plan.eval_grad(ineq, variables))
                    plan.logger.debug(
                        "Ineq. cons. gradient evaluated, norm: %f",
                        np.linalg.norm(val))
                    return -val

                constraints.append({
                    "type": "ineq",
                    "fun": cons_fun,
                    "jac": cons_jac
                })
        elif self._cons_ineq or self._cons_eq:
            plan.logger.warning(
                "Using optimizer that cannot handle constraints. Constraints "
                "ignored: %d", len(self._cons_ineq) + len(self._cons_eq))

        # Keep track of iteration number.
        iter_num = start_iter

        def callback(x):
            """Per-iteration hook: syncs variables and emits a plan event."""
            # Update the variable values before evaluating monitors.
            unpack_and_set(x)

            # Update iteration number.
            nonlocal iter_num
            iter_num += 1
            if self._iter:
                plan.set_var_value(self._iter, iter_num)

            plan.write_event({
                "state": "optimizing",
                "iteration": iter_num
            }, self._monitor_list)

        # Adjust total number of iterations if we are resuming.
        options = copy.deepcopy(self._options)
        if "maxiter" in options:
            options["maxiter"] -= start_iter

        initial_val = np.hstack(initial_val)
        self._results = scipy.optimize.minimize(func,
                                                initial_val,
                                                method=self._method,
                                                jac=jac,
                                                callback=callback,
                                                bounds=bounds,
                                                constraints=constraints,
                                                **options)
        unpack_and_set(self._results["x"])
Example #4
0
 def run(self, plan: goos.OptimizationPlan) -> None:
     """Mark this action's variable as thawed (optimizable) in the plan."""
     target_var = self._var
     plan.thaw_var(target_var)
Example #5
0
 def run(self, plan: goos.OptimizationPlan) -> None:
     """Mark this action's variable as frozen (excluded from optimization)."""
     target_var = self._var
     plan.freeze_var(target_var)
Example #6
0
    def run(self, plan: goos.OptimizationPlan) -> None:
        """Discretize the continuous variable into edge/level variables.

        Evaluates the continuous design, runs the dynamic-programming
        edge-location search, and stores the result either as raw edge
        locations (unbounded) or as widths, where every width except the
        first is bounded below by the minimum feature size.
        """
        cont_vals = plan.eval_node(self._cont_var).array.squeeze()

        edge_inds, level_vals = _get_general_edge_loc_dp(
            cont_vals, self._depths, self._discr_min_features,
            self._discr_max_features, self._start_depth_ind,
            self._end_depth_ind, self._divisions)
        # Convert edge indices into physical locations.
        edge_locs = np.array(edge_inds) * self._pixel_size

        if self._use_edge_locs:
            # Store raw edge locations with an unbounded range.
            plan.set_var_value(self._disc_widths, edge_locs)
            unit = np.ones_like(edge_locs)
            plan.set_var_bounds(self._disc_widths,
                                [unit * -np.inf, unit * np.inf])
        else:
            # Store widths: the first entry is the leading edge location,
            # the rest are successive edge-to-edge differences.
            widths = np.r_[edge_locs[0], edge_locs[1:] - edge_locs[:-1]]
            plan.set_var_value(self._disc_widths, widths)
            unit = np.ones_like(widths)
            # The leading offset is exempt from the minimum-feature bound.
            lower_scale = np.array(unit)
            lower_scale[0] = 0
            plan.set_var_bounds(
                self._disc_widths,
                [lower_scale * self._min_features, unit * np.inf])

        plan.set_var_value(self._disc_levels, level_vals)