Example 1
    def _before_root_node(self, problem, upper_bound):
        if self._user_model is None:
            raise RuntimeError("No user model. Did you call 'before_solve'?")
        obbt_upper_bound = None
        if upper_bound is not None and not is_inf(upper_bound):
            obbt_upper_bound = upper_bound

        model = self._user_model
        obbt_start_time = current_time()
        try:
            perform_obbt_on_model(
                model, problem, obbt_upper_bound,
                timelimit=self.solver.config['obbt_timelimit'],
                simplex_maxiter=self.solver.config['obbt_simplex_maxiter'],
            )
        except TimeoutError:
            logger.info(0, 'OBBT timed out')
            return
        except Exception as ex:
            logger.warning(0, 'Error performing OBBT: {}', ex)
            raise
        finally:
            # Record the time spent in OBBT regardless of the outcome.
            self._bac_telemetry.increment_obbt_time(
                seconds_elapsed_since(obbt_start_time)
            )
Example 2
 def cut_loop_should_terminate(self, state, start_time):
     elapsed_time = seconds_elapsed_since(start_time)
     return (
         self._cuts_converged(state) or
         self._cuts_iterations_exceeded(state) or
         self._timeout() or
         elapsed_time > self._cuts_timelimit
     )
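Every snippet in this listing relies on the timing helpers current_time and seconds_elapsed_since. A minimal sketch of how they could be implemented, assuming they simply wrap Python's monotonic clock; the real GALINI helpers may differ:

import time


def current_time():
    # Reference point for later elapsed-time measurements.
    return time.monotonic()


def seconds_elapsed_since(start):
    # Seconds elapsed since a value returned by current_time().
    return time.monotonic() - start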
Example 3
from contextlib import contextmanager


@contextmanager
def timespan(telemetry, name):
    name = 'time.{}'.format(name)
    counter = telemetry.get_counter(name)
    if counter is None:
        counter = telemetry.create_counter(name, 0.0)

    telemetry._logger.log_solve_start(name)
    start = current_time()
    yield
    duration = seconds_elapsed_since(start)
    telemetry._logger.log_solve_end(name)
    counter.increment(duration)
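Hypothetical usage of timespan, assuming the @contextmanager decorator added above and an existing telemetry object; the counter name and the body of the block are illustrative only:

# Accumulates the seconds spent inside the block into the 'time.fbbt'
# counter and logs the solve start/end events around it.
with timespan(telemetry, 'fbbt'):
    run_fbbt_step()  # hypothetical expensive operation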
Example 4
    def generate(self, run_id, problem, relaxed_problem, linear_problem,
                 mip_solution, tree, node):
        """Generate a new set of cuts."""
        all_cuts = []
        logger.info(
            run_id,
            'Generating cuts: {}',
            [gen.name for gen in self._generators],
        )

        paranoid_mode = self.galini.paranoid_mode

        for gen, counter in zip(self._generators, self._cuts_counters):
            start_time = current_time()
            cuts = gen.generate(
                run_id, problem, relaxed_problem, linear_problem, mip_solution,
                tree, node
            )
            elapsed_time = seconds_elapsed_since(start_time)

            if cuts is None:
                cuts = []

            if not isinstance(cuts, list):
                raise ValueError(
                    'CutsGenerator.generate must return a list of cuts.'
                )
            logger.info(
                run_id, '  * {} generated {} cuts.', gen.name, len(cuts)
            )

            for cut in cuts:
                if paranoid_mode:
                    _check_cut_coefficients(cut)
                all_cuts.append(cut)
            counter.increment(len(cuts), elapsed_time)
        return all_cuts
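A minimal sketch of the generator contract enforced by the loop above: generate must return a list of cuts, or None, which is treated as an empty list. The class below is illustrative and not part of GALINI's API.

class NoOpCutsGenerator:
    # Illustrative only: shows the interface the manager above expects.
    name = 'noop'

    def generate(self, run_id, problem, relaxed_problem, linear_problem,
                 mip_solution, tree, node):
        # Must return a list of cuts; None is also accepted and treated
        # as an empty list. Anything else triggers the ValueError above.
        return []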
Example 5
    def generate(self, problem, relaxed_problem, mip_solution, tree, node):
        """Generate a new set of cuts."""
        all_cuts = []
        self.logger.info(
            'Generating cuts: {}',
            [gen.name for gen in self._generators],
        )

        for gen, counter in zip(self._generators, self._cuts_counters):
            start_time = current_time()
            cuts = gen.generate(problem, relaxed_problem, mip_solution, tree,
                                node)
            elapsed_time = seconds_elapsed_since(start_time)

            if cuts is None:
                cuts = []

            if not isinstance(cuts, list):
                raise ValueError(
                    'CutsGenerator.generate must return a list of cuts.')
            self.logger.info('  * {} generated {} cuts.', gen.name, len(cuts))

            for cut in cuts:
                if not self.galini.debug_assert_(
                        lambda: _check_cut_coefficients_are_numerically_reasonable(cut),
                        'Numerical coefficients in cut are not reasonable'):
                    from galini.ipython import embed_ipython
                    embed_ipython(
                        header='Numerical coefficients in cut are not reasonable')

                all_cuts.append(cut)
            counter.increment(len(cuts), elapsed_time)
        return all_cuts
Example 6
    def _perform_fbbt(self, run_id, problem, tree, node, maxiter=None):
        fbbt_start_time = current_time()
        logger.debug(run_id, 'Performing FBBT')

        objective_upper_bound = None
        if tree.upper_bound is not None:
            objective_upper_bound = tree.upper_bound

        fbbt_maxiter = self.fbbt_maxiter
        if maxiter is not None:
            fbbt_maxiter = maxiter
        branching_variable = None
        if not node.storage.is_root:
            branching_variable = node.storage.branching_variable
        bounds = perform_fbbt(
            problem,
            maxiter=fbbt_maxiter,
            timelimit=self.fbbt_timelimit,
            objective_upper_bound=objective_upper_bound,
            branching_variable=branching_variable,
        )

        self._bounds, self._monotonicity, self._convexity = \
            propagate_special_structure(problem, bounds)

        logger.debug(run_id, 'Set FBBT Bounds')
        cause_infeasibility = None
        for v in problem.variables:
            vv = problem.variable_view(v)
            new_bound = bounds[v]
            if new_bound is None:
                new_bound = Interval(None, None)

            new_lb = best_lower_bound(
                v.domain,
                new_bound.lower_bound,
                vv.lower_bound()
            )

            new_ub = best_upper_bound(
                v.domain,
                new_bound.upper_bound,
                vv.upper_bound()
            )

            if new_lb > new_ub:
                cause_infeasibility = v

        if cause_infeasibility is not None:
            logger.info(
                run_id, 'Bounds on variable {} cause infeasibility',
                cause_infeasibility.name
            )
        else:
            for v in problem.variables:
                vv = problem.variable_view(v)
                new_bound = bounds[v]

                if new_bound is None:
                    new_bound = Interval(None, None)

                new_lb = best_lower_bound(
                    v.domain,
                    new_bound.lower_bound,
                    vv.lower_bound()
                )

                new_ub = best_upper_bound(
                    v.domain,
                    new_bound.upper_bound,
                    vv.upper_bound()
                )

                if np.isinf(new_lb):
                    new_lb = -np.inf

                if np.isinf(new_ub):
                    new_ub = np.inf

                if np.abs(new_ub - new_lb) < mc.epsilon:
                    new_lb = new_ub

                logger.debug(run_id, '  {}: [{}, {}]', v.name, new_lb, new_ub)
                vv.set_lower_bound(new_lb)
                vv.set_upper_bound(new_ub)

        group_name = '_'.join([str(c) for c in node.coordinate])
        logger.tensor(run_id, group_name, 'lb', problem.lower_bounds)
        logger.tensor(run_id, group_name, 'ub', problem.upper_bounds)
        self._bac_telemetry.increment_fbbt_time(
            seconds_elapsed_since(fbbt_start_time)
        )
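A sketch of what best_lower_bound and best_upper_bound are assumed to do in this snippet: keep the tighter of the FBBT bound and the current bound, rounding inward for integer domains. This is only an assumption for illustration; the real GALINI helpers may differ, for example in how they treat tolerances.

import numpy as np


def best_lower_bound(domain, a, b):
    # Tightest (largest) known lower bound; None means unbounded below.
    candidates = [x for x in (a, b) if x is not None]
    if not candidates:
        return -np.inf
    lb = max(candidates)
    return np.ceil(lb) if domain.is_integer() else lb


def best_upper_bound(domain, a, b):
    # Tightest (smallest) known upper bound; None means unbounded above.
    candidates = [x for x in (a, b) if x is not None]
    if not candidates:
        return np.inf
    ub = min(candidates)
    return np.floor(ub) if domain.is_integer() else ub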
Example 7
    def _solve_problem_at_node(self, run_id, problem, relaxed_problem,
                               tree, node):
        logger.info(
            run_id,
            'Starting Cut generation iterations. Maximum iterations={}',
            self.cuts_maxiter,
        )
        generators_name = [
            g.name for g in self._cuts_generators_manager.generators
        ]
        logger.info(
            run_id,
            'Using cuts generators: {}',
            ', '.join(generators_name)
        )

        solution = self._try_solve_convex_problem(problem)
        if solution is not None:
            return solution

        if not node.has_parent:
            feasible_solution = node.initial_feasible_solution
        else:
            feasible_solution = None

        log_problem(
            logger, run_id, DEBUG, relaxed_problem,
            title='Convex Relaxation',
        )

        linear_problem = self._build_linear_relaxation(relaxed_problem)

        log_problem(
            logger, run_id, DEBUG, linear_problem.relaxed,
            title='Linearized Relaxation',
        )

        cuts_state = None
        lower_bound_search_start_time = current_time()
        if self._use_lp_cut_phase:
            logger.info(run_id, 'Start LP cut phase')
            originally_integer = []
            if not self._use_milp_cut_phase:
                for var in linear_problem.relaxed.variables:
                    vv = linear_problem.relaxed.variable_view(var)
                    if vv.domain.is_integer():
                        originally_integer.append(var)
                        linear_problem.relaxed.set_domain(var, core.Domain.REAL)

            feasible, cuts_state, mip_solution = self._perform_cut_loop(
                run_id, tree, node, problem, relaxed_problem, linear_problem,
            )

            for var in originally_integer:
                linear_problem.relaxed.set_domain(var, core.Domain.INTEGER)

            if not feasible:
                logger.info(run_id, 'LP solution is not feasible')
                self._bac_telemetry.increment_lower_bound_time(
                    seconds_elapsed_since(lower_bound_search_start_time)
                )
                return NodeSolution(mip_solution, feasible_solution)

            # Solve MILP to obtain MILP solution
            mip_solution = self._mip_solver.solve(linear_problem.relaxed)
            logger.info(
                run_id,
                'MILP solution after LP cut phase: {} {}',
                mip_solution.status,
                mip_solution,
            )
            if mip_solution.status.is_success():
                logger.update_variable(
                    run_id,
                    iteration=self._cut_loop_outer_iteration,
                    var_name='milp_solution',
                    value=mip_solution.objective_value()
                )

        self._update_node_branching_decision(
            linear_problem, mip_solution, node, problem
        )

        if self._use_milp_cut_phase:
            logger.info(run_id, 'Using MILP cut phase')
            feasible, cuts_state, mip_solution = self._perform_cut_loop(
                run_id, tree, node, problem, relaxed_problem, linear_problem,
            )

            if not feasible:
                logger.info(run_id, 'MILP cut phase solution is not feasible')
                self._bac_telemetry.increment_lower_bound_time(
                    seconds_elapsed_since(lower_bound_search_start_time)
                )
                return NodeSolution(mip_solution, feasible_solution)

        assert cuts_state is not None
        self._bac_telemetry.increment_lower_bound_time(
            seconds_elapsed_since(lower_bound_search_start_time)
        )

        if cuts_state.lower_bound >= tree.upper_bound and \
                not is_close(cuts_state.lower_bound, tree.upper_bound,
                             atol=mc.epsilon):
            # No improvement
            return NodeSolution(mip_solution, None)

        if self._timeout():
            # No time for finding primal solution
            return NodeSolution(mip_solution, None)

        upper_bound_search_start_time = current_time()

        starting_point = [v.value for v in mip_solution.variables]
        primal_solution = solve_primal_with_starting_point(
            run_id, problem, starting_point, self._nlp_solver, fix_all=True
        )
        new_primal_solution = solve_primal(
            run_id, problem, mip_solution, self._nlp_solver
        )
        if new_primal_solution is not None:
            primal_solution = new_primal_solution

        self._bac_telemetry.increment_upper_bound_time(
            seconds_elapsed_since(upper_bound_search_start_time)
        )

        if not primal_solution.status.is_success() and \
                feasible_solution is not None:
            # Could not get primal solution, but have a feasible solution
            return NodeSolution(mip_solution, feasible_solution)

        return NodeSolution(mip_solution, primal_solution)
Example 8
def perform_obbt_on_model(model, problem, upper_bound, timelimit,
                          simplex_maxiter):
    """Perform OBBT on Pyomo model using Coramin.

    Parameters
    ----------
    model : ConcreteModel
        the pyomo concrete model
    problem : Problem
        the GALINI problem
    upper_bound : float or None
        the objective value upper bound, if known
    timelimit : int
        a timelimit, in seconds
    simplex_maxiter : int
        the maximum number of simplex iterations

    """
    obbt_start_time = current_time()

    for var in model.component_data_objects(ctype=pe.Var):
        var.domain = pe.Reals

        if not (var.lb is None or np.isfinite(var.lb)):
            var.setlb(None)

        if not (var.ub is None or np.isfinite(var.ub)):
            var.setub(None)

    relaxed_model = relax(model)

    solver = pe.SolverFactory('cplex_persistent')
    solver.set_instance(relaxed_model)
    # TODO(fra): make this non-cplex specific
    simplex_limits = solver._solver_model.parameters.simplex.limits  # pylint: disable=protected-access
    simplex_limits.iterations.set(simplex_maxiter)
    # collect variables in nonlinear constraints
    nonlinear_variables = ComponentSet()
    for constraint in model.component_data_objects(ctype=pe.Constraint):
        # skip linear constraint
        if constraint.body.polynomial_degree() == 1:
            continue

        for var in identify_variables(constraint.body, include_fixed=False):
            # Coramin will complain about variables that are fixed
            # Note: Coramin uses a hard-coded 1e-6 tolerance
            if not var.has_lb() or not var.has_ub():
                nonlinear_variables.add(var)
            else:
                if not np.abs(var.ub - var.lb) < 1e-6:
                    nonlinear_variables.add(var)

    relaxed_vars = [
        getattr(relaxed_model, v.name) for v in nonlinear_variables
    ]

    logger.info(0, 'Performing OBBT on {} variables', len(relaxed_vars))

    # Avoid Coramin raising an exception when the problem has no active
    # objective but an objective upper bound is passed in.
    objectives = model.component_data_objects(pe.Objective,
                                              active=True,
                                              sort=True,
                                              descend_into=True)
    if len(list(objectives)) == 0:
        upper_bound = None

    time_left = timelimit - seconds_elapsed_since(obbt_start_time)
    with timeout(time_left, 'Timeout in OBBT'):
        result = coramin_obbt.perform_obbt(relaxed_model,
                                           solver,
                                           varlist=relaxed_vars,
                                           objective_bound=upper_bound)

    if result is None:
        return

    logger.debug(0, 'New Bounds')
    for v, new_lb, new_ub in zip(relaxed_vars, *result):
        vv = problem.variable_view(v.name)
        if new_lb is None or new_ub is None:
            logger.warning(0, 'Could not tighten variable {}', v.name)
        old_lb = vv.lower_bound()
        old_ub = vv.upper_bound()
        new_lb = best_lower_bound(vv.domain, new_lb, old_lb)
        new_ub = best_upper_bound(vv.domain, new_ub, old_ub)
        if new_lb is not None and new_ub is not None:
            if is_close(new_lb, new_ub, atol=mc.epsilon):
                if old_lb is not None and \
                        is_close(new_lb, old_lb, atol=mc.epsilon):
                    new_ub = new_lb
                else:
                    new_lb = new_ub
        vv.set_lower_bound(new_lb)
        vv.set_upper_bound(new_ub)
        logger.debug(0, '  {}: [{}, {}]', v.name, vv.lower_bound(),
                     vv.upper_bound())
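The timeout(time_left, 'Timeout in OBBT') block above is what produces the TimeoutError caught in Example 1. A minimal sketch of such a helper, assuming a Unix-only, signal-based implementation; GALINI's actual timeout helper may be implemented differently:

import signal
from contextlib import contextmanager


@contextmanager
def timeout(seconds, message):
    # Raise TimeoutError(message) if the block runs longer than `seconds`.
    def _handler(signum, frame):
        raise TimeoutError(message)

    previous = signal.signal(signal.SIGALRM, _handler)
    signal.alarm(max(1, int(seconds)))
    try:
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous)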
Example 9
 def should_stop(self):
     iterations_exceeded = super().should_stop()
     elapsed = seconds_elapsed_since(self.start_time)
     return iterations_exceeded or elapsed > self.timelimit
Example 10
    def execute_with_problem(self, model, problem, args):
        galini = Galini()
        if args.config:
            galini.update_configuration(args.config)

        solver_cls = galini.get_solver(args.solver.lower())

        if solver_cls is None:
            available = ', '.join(solvers_reg.keys())
            print('Solver {} not available. Available solvers: {}'.format(
                args.solver, available))
            sys.exit(1)

        apply_logging_config(galini.get_configuration_group('logging'))
        solver = solver_cls(galini)

        galini_group = galini.get_configuration_group('galini')
        timelimit = galini_group.get('timelimit')
        elapsed_counter = galini.telemetry.create_gauge('elapsed_time', 0.0)

        set_timelimit(timelimit)
        start_timelimit()
        start_time = current_time()

        solver.before_solve(model, problem)

        solution = solver.solve(
            problem, known_optimal_objective=args.known_optimal_objective)

        elapsed_counter.set_value(seconds_elapsed_since(start_time))

        if solution is None:
            raise RuntimeError('Solver did not return a solution')

        obj_table = OutputTable('Objectives', [
            {
                'id': 'name',
                'name': 'Objective',
                'type': 't'
            },
            {
                'id': 'value',
                'name': 'Value',
                'type': 'f'
            },
        ])

        value = solution.objective.value
        if not problem.objective.original_sense.is_minimization():
            if value is not None:
                value = -value

        obj_table.add_row({
            'name': solution.objective.name,
            'value': value,
        })

        var_table = OutputTable('Variables', [
            {
                'id': 'name',
                'name': 'Variable',
                'type': 't'
            },
            {
                'id': 'value',
                'name': 'Value',
                'type': 'f'
            },
        ])
        for var in solution.variables:
            var_table.add_row({
                'name': var.name,
                'value': var.value,
            })

        counter_table = OutputTable('Counters', [
            {
                'id': 'name',
                'name': 'Name',
                'type': 't'
            },
            {
                'id': 'value',
                'name': 'Value',
                'type': 'f'
            },
        ])
        for counter in galini.telemetry.counters_values():
            counter_table.add_row(counter)

        print_output_table([obj_table, var_table, counter_table], args)
Example 11
    def _get_sdp_decomposition(self, problem, relaxed_problem):
        start_time = current_time()
        time_limit = self._preprocess_time_limit

        dim = self._dim
        agg_list = []

        variables = [
            var for var in problem.component_data_objects(
                pe.Var, active=True, descend_into=True)
        ]
        self._variables = variables

        num_vars = len(variables)
        self._num_vars = num_vars

        var_idx_map = pe.ComponentMap([(var, idx)
                                       for idx, var in enumerate(variables)])
        self._var_idx_map = var_idx_map

        constraints = [
            constraint for constraint in problem.component_data_objects(
                pe.Constraint, active=True, descend_into=True)
        ]
        self._constraints = constraints

        objective = next(
            problem.component_data_objects(pe.Objective,
                                           active=True,
                                           descend_into=True))
        self._objective = objective

        quad_terms_per_con = [[] for _ in range(1 + len(constraints))]

        if seconds_elapsed_since(start_time) > time_limit:
            return []

        # Find all quadratic terms (across all objectives + constraints) and form an adjacency matrix for their indices
        adj_mat = np.zeros((num_vars, num_vars))

        for con_idx, constraint in enumerate([objective, *constraints]):
            if isinstance(constraint, pe.Objective):
                root_expr = constraint.expr
            else:
                root_expr = constraint.body

            quadratic_expr = None

            if isinstance(root_expr, QuadraticExpression):
                quadratic_expr = root_expr
            elif isinstance(root_expr, SumExpression):
                for arg in root_expr.args:
                    if isinstance(arg, QuadraticExpression):
                        quadratic_expr = arg
                        break

            if seconds_elapsed_since(start_time) > time_limit:
                return []

            if quadratic_expr is not None:
                for term in quadratic_expr.terms:
                    if not is_close(term.coefficient,
                                    0.0,
                                    atol=self.galini.mc.epsilon):
                        idx_var1 = var_idx_map[term.var1]
                        idx_var2 = var_idx_map[term.var2]

                        adj_mat[idx_var1, idx_var2] = 1
                        adj_mat[idx_var2, idx_var1] = 1

                        quad_terms_per_con[con_idx].append(
                            (idx_var1, idx_var2, term.coefficient))

        # Get only cliques up to the dimension of the SDP decomposition
        all_cliques_iterator = enumerate_all_cliques(
            from_numpy_matrix(adj_mat))
        for clique in all_cliques_iterator:
            if len(clique) < 2:
                continue
            elif len(clique) <= dim:
                agg_list.append(set(clique))
            else:
                break

        # Eliminate cliques that are subsets of other cliques
        agg_list = [(x, []) for x in agg_list
                    if not any(x <= y for y in agg_list if x is not y)]

        # Look at one constraint at a time for cliques up to dim in size
        nb_objs = 1
        for con_idx, constraint in enumerate([objective, *constraints]):
            if seconds_elapsed_since(start_time) > time_limit:
                return []

            adj_mat_con = np.zeros((num_vars, num_vars))
            coeff_mat_con = np.zeros((num_vars, num_vars))

            G = Graph()
            for (idx_var1, idx_var2,
                 term_coeff) in quad_terms_per_con[con_idx]:
                adj_mat_con[idx_var1, idx_var2] = 1
                adj_mat_con[idx_var2, idx_var1] = 1
                G.add_edge(idx_var1, idx_var2)
                coeff_mat_con[idx_var1, idx_var2] = term_coeff
                coeff_mat_con[idx_var2, idx_var1] = term_coeff

            # Get only cliques up to the dimension of the SDP decomposition
            agg_list_con = []
            for clique in enumerate_all_cliques(G):
                if seconds_elapsed_since(start_time) > time_limit:
                    return []

                if len(clique) < 2:
                    continue
                elif len(clique) <= dim:
                    agg_list_con.append(set(clique))
                else:
                    break

            # Eliminate cliques that are subsets of other cliques
            agg_list_con = [
                x for x in agg_list_con
                if not any(x <= y for y in agg_list_con if x is not y)
            ]

            # Aggregate coefficient info (input_nn) used as input for neural networks for each constraint
            for agg_idx, (clique, _) in enumerate(agg_list):
                for clique_con in agg_list_con:
                    if clique_con <= clique and len(
                            clique_con.intersection(clique)) > 1:
                        mat_idxs = list(
                            combinations_with_replacement(sorted(clique), 2))
                        input_nn = itemgetter(*mat_idxs)(coeff_mat_con)
                        agg_list[agg_idx][1].append(
                            (np.asarray(input_nn), 1, con_idx - nb_objs))

        # Sort clique elements once they are no longer needed as sets (neural networks are not invariant to element order)
        agg_list = [(sorted(clique), _) for (clique, _) in agg_list]

        return agg_list
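A small, self-contained illustration of the clique machinery used above. networkx's enumerate_all_cliques yields cliques in non-decreasing size, which is why the loops above can break as soon as a clique exceeds dim; subset cliques are then dropped. The graph edges and dim value below are made up for the example.

from networkx import Graph, enumerate_all_cliques

dim = 3
G = Graph()
# edges of a small quadratic-term adjacency structure (illustrative)
G.add_edges_from([(0, 1), (1, 2), (0, 2), (2, 3)])

agg_list = []
for clique in enumerate_all_cliques(G):
    if len(clique) < 2:
        continue
    elif len(clique) <= dim:
        agg_list.append(set(clique))
    else:
        break

# Eliminate cliques that are subsets of other cliques
agg_list = [x for x in agg_list
            if not any(x <= y for y in agg_list if x is not y)]
# agg_list is now [{2, 3}, {0, 1, 2}]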
Example 12
def perform_obbt_on_model(solver, model, linear_model, upper_bound, timelimit, relative_gap, absolute_gap,
                          simplex_maxiter, mc):
    """Perform OBBT on Pyomo model using Coramin.

    Parameters
    ----------
    solver : Solver
        the mip solver to use
    model : ConcreteModel
        the pyomo concrete model
    linear_model : ConcreteModel
        the linear relaxation of model
    upper_bound : float or None
        the objective value upper bound, if known
    timelimit : int
        a timelimit, in seconds
    relative_gap : float
        mip relative gap
    absolute_gap : float
        mip absolute gap
    simplex_maxiter : int
        the maximum number of simplex iterations
    mc : MathContext
        GALINI math context
    """
    obbt_start_time = current_time()

    originally_integer = []
    original_bounds = pe.ComponentMap()
    for var in linear_model.component_data_objects(ctype=pe.Var):
        original_bounds[var] = var.bounds
        if not var.is_continuous():
            # Relax integrality so the OBBT subproblems are linear;
            # the original domains are restored after OBBT.
            originally_integer.append((var, var.domain))
            var.domain = pe.Reals

    # collect variables in nonlinear constraints
    nonlinear_variables = ComponentSet()
    for relaxation in relaxation_data_objects(linear_model, active=True, descend_into=True):
        if isinstance(relaxation, BILINEAR_RELAXATIONS_TYPES):
            for var in relaxation.get_rhs_vars():
                # Coramin will complain about variables that are fixed
                if not var.has_lb() or not var.has_ub():
                    nonlinear_variables.add(var)
                else:
                    if not np.abs(var.ub - var.lb) < mc.epsilon:
                        nonlinear_variables.add(var)

    time_left = timelimit - seconds_elapsed_since(obbt_start_time)
    nonlinear_variables = list(nonlinear_variables)
    vars_to_tighten = nonlinear_variables

    update_solver_options(
        solver,
        timelimit=time_left,
        maxiter=simplex_maxiter,
        relative_gap=relative_gap,
        absolute_gap=absolute_gap,
    )

    obbt_ex = None
    result = None
    try:
        (vars_to_minimize, vars_to_maximize) = \
            coramin_filters.aggressive_filter(
                candidate_variables=nonlinear_variables,
                relaxation=linear_model,
                solver=solver,
                objective_bound=upper_bound,
                tolerance=mc.epsilon,
                max_iter=10,
                improvement_threshold=5
            )
        vars_to_tighten = vars_to_minimize
        visited_vars = ComponentSet(vars_to_tighten)
        for v in vars_to_maximize:
            if v not in visited_vars:
                vars_to_tighten.add(v)
                visited_vars.add(v)
        result = coramin_obbt.perform_obbt(
            linear_model,
            solver,
            time_limit=time_left,
            varlist=vars_to_tighten,
            objective_bound=upper_bound,
            warning_threshold=mc.epsilon
        )
    except Exception as ex:
        obbt_ex = ex

    for var, domain in originally_integer:
        var.domain = domain

    # If we encountered an exception in Coramin, restore bounds and then raise.
    if obbt_ex is not None:
        for var, (lb, ub) in original_bounds.items():
            var.setlb(lb)
            var.setub(ub)
        raise obbt_ex

    if result is None:
        return

    new_bounds = pe.ComponentMap()

    eps = mc.epsilon

    for var, new_lb, new_ub in zip(vars_to_tighten, *result):
        original_var = model.find_component(var)
        if original_var is None:
            continue
        new_lb = best_lower_bound(var, new_lb, var.lb, eps)
        new_ub = best_upper_bound(var, new_ub, var.ub, eps)
        if np.abs(new_ub - new_lb) < eps:
            new_lb = new_ub
        new_bounds[var] = (new_lb, new_ub)
        safe_set_bounds(var, new_lb, new_ub)
        safe_set_bounds(original_var, new_lb, new_ub)

    # Rebuild relaxations since bounds changed
    for relaxation in relaxation_data_objects(linear_model, active=True, descend_into=True):
        relaxation.rebuild()

    return new_bounds
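Hypothetical call site for this newer perform_obbt_on_model signature, assuming a persistent MIP solver as in Example 8 and configuration values similar to those read in Example 1; the config dict, gap values and mc object are placeholders:

solver = pe.SolverFactory('cplex_persistent')
new_bounds = perform_obbt_on_model(
    solver, model, linear_model, upper_bound,
    timelimit=config['obbt_timelimit'],
    relative_gap=1e-6,
    absolute_gap=1e-6,
    simplex_maxiter=config['obbt_simplex_maxiter'],
    mc=mc,
)
if new_bounds is None:
    # Coramin returned no result; bounds were left unchanged.
    pass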
Example 13
def perform_fbbt_on_model(model, tree, node, maxiter, timelimit, eps, skip_special_structure=False):
    """Perform FBBT on the Pyomo model and tighten its variable bounds.

    Parameters
    ----------
    model
        the pyomo concrete model
    tree
        the branch & bound tree, used to read the objective bounds
    node
        the current branch & bound node
    maxiter : int
        the maximum number of FBBT iterations
    timelimit : int
        a timelimit, in seconds
    eps : float
        the tolerance used when comparing bounds
    skip_special_structure : bool
        if True, skip monotonicity and convexity propagation

    Returns
    -------
    tuple
        the (bounds, monotonicity, convexity) maps, or (None, None, None)
        if the new bounds prove the node infeasible
    """
    objective_bounds = pe.ComponentMap()
    objective_bounds[model._objective] = (tree.lower_bound, tree.upper_bound)

    branching_variable = None
    if not node.storage.is_root:
        branching_variable = node.storage.branching_variable

    fbbt_start_time = current_time()
    should_continue = lambda: seconds_elapsed_since(fbbt_start_time) <= timelimit

    bounds = perform_fbbt(
        model,
        max_iter=maxiter,
        objective_bounds=objective_bounds,
        should_continue=should_continue,
        #branching_variable=branching_variable,
    )

    if not skip_special_structure:
        monotonicity, convexity = \
            propagate_special_structure(model, bounds)
    else:
        monotonicity = convexity = None

    cause_infeasibility = None
    new_bounds_map = pe.ComponentMap()
    for var in model.component_data_objects(pe.Var, active=True):
        new_bound = bounds[var]
        if new_bound is None:
            new_bound = Interval(None, None)

        new_lb = best_lower_bound(var, new_bound.lower_bound, var.lb, eps)
        new_ub = best_upper_bound(var, new_bound.upper_bound, var.ub, eps)

        new_bounds_map[var] = (new_lb, new_ub)
        if new_lb > new_ub:
            cause_infeasibility = var
            break

    if cause_infeasibility is not None:
        return None, None, None
    else:
        for var, (new_lb, new_ub) in new_bounds_map.items():
            if np.abs(new_ub - new_lb) < eps:
                new_lb = new_ub
            safe_set_bounds(var, new_lb, new_ub)
            # Also update bounds map
            bounds[var] = Interval(new_lb, new_ub)

    return bounds, monotonicity, convexity
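Hypothetical caller sketch, assuming model, tree and node objects like those used in the other examples; the iteration and time limits are illustrative:

bounds, monotonicity, convexity = perform_fbbt_on_model(
    model, tree, node, maxiter=10, timelimit=5, eps=1e-6,
)
if bounds is None:
    # FBBT found conflicting bounds on some variable: the node is
    # infeasible and can be pruned.
    pass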
Example 14
    def solve(self,
              model,
              algorithm=None,
              clone_model=True,
              known_optimal_objective=None):
        if clone_model:
            model = model.clone()

        if algorithm is None:
            algorithm = 'bac'

        algo_cls = self.get_algorithm(algorithm.lower())

        if algo_cls is None:
            available = ', '.join(self.available_algorithms())
            raise Exception(
                'Algorithm {} not available. Available algorithms: {}'.format(
                    algorithm, available))

        galini_group = self.get_configuration_group('galini')
        timelimit = galini_group.get('timelimit')
        elapsed_counter = self.telemetry.create_gauge('elapsed_time', 0.0)

        self.timelimit.set_timelimit(timelimit)
        self.timelimit.start_now()

        start_time = current_time()

        # Check that the problem has only one objective; if it is a maximisation, convert it to a minimisation
        original_objective = None
        for objective in model.component_data_objects(pe.Objective,
                                                      active=True):
            if original_objective is not None:
                raise ValueError(
                    'Algorithm does not support models with multiple objectives'
                )
            original_objective = objective

        if original_objective is None:
            model._objective = pe.Objective(expr=0.0, sense=pe.minimize)
        else:
            if not original_objective.is_minimizing():
                new_objective = pe.Objective(expr=-original_objective.expr,
                                             sense=pe.minimize)
            else:
                new_objective = pe.Objective(expr=original_objective.expr,
                                             sense=pe.minimize)
            model._objective = new_objective
            model._objective.is_originally_minimizing = \
                original_objective.is_minimizing()
            original_objective.deactivate()

        for var in model.component_data_objects(pe.Var, active=True):
            if var.is_fixed():
                continue
            lb = var.lb if var.lb is not None else -np.inf
            ub = var.ub if var.ub is not None else np.inf
            value = var.value
            if value is not None and (value < lb or value > ub):
                if np.isinf(lb) or np.isinf(ub):
                    value = 0.0
                else:
                    value = lb + (ub - lb) / 2.0
                    if var.is_integer() or var.is_binary():
                        value = np.rint(value)

                var.set_value(value)

        algo = algo_cls(self)
        solution = algo.solve(model,
                              known_optimal_objective=known_optimal_objective)

        del model._objective
        if original_objective is not None:
            original_objective.activate()
            if not original_objective.is_minimizing():
                if solution.objective is not None:
                    solution.objective = -solution.objective

        elapsed_counter.set_value(seconds_elapsed_since(start_time))

        return solution
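A standalone Pyomo illustration of the sense flip performed above: a maximisation objective is deactivated and replaced by minimising its negated expression, and the optimal value is negated back when reported. The model and coefficients are made up.

import pyomo.environ as pe

m = pe.ConcreteModel()
m.x = pe.Var(bounds=(0, 10))
m.obj = pe.Objective(expr=3 * m.x, sense=pe.maximize)

# Mirror of the conversion above.
m.obj.deactivate()
m._objective = pe.Objective(expr=-m.obj.expr, sense=pe.minimize)

# After solving the minimisation, report -value as the original
# (maximisation) objective value, as done above.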