def __init__(self, model):
    self.model = model
    self.original_to_new_var_map = pe.ComponentMap()
    self.aux_var_map = dict()
    self.reverse_var_map = dict()
    self.var_relax_map = pe.ComponentMap()
    self.degree_map = pe.ComponentMap()
    self.counter = RelaxationCounter()

def set_input(self, aux_var, shape, f_x_expr, persistent_solvers=None,
              large_eval_tol=math.inf, use_linear_relaxation=True):
    """
    Parameters
    ----------
    aux_var: pyomo.core.base.var._GeneralVarData
        The auxiliary variable replacing f(x)
    shape: FunctionShape
        Either FunctionShape.CONVEX or FunctionShape.CONCAVE
    f_x_expr: pyomo expression
        The pyomo expression representing f(x)
    persistent_solvers: list
        List of persistent solvers that should be updated when the relaxation changes
    large_eval_tol: float
        To avoid numerical problems, if f_x_expr or its derivative evaluates to a value
        larger than large_eval_tol at a point in x_pts, then that point is skipped.
    use_linear_relaxation: bool
        Specifies whether a linear or nonlinear relaxation should be used
    """
    if shape not in {FunctionShape.CONVEX, FunctionShape.CONCAVE}:
        raise ValueError(
            'MultivariateRelaxation only supports concave or convex functions.')
    self._function_shape = shape
    if shape == FunctionShape.CONVEX:
        relaxation_side = RelaxationSide.UNDER
    else:
        relaxation_side = RelaxationSide.OVER
    self._set_input(relaxation_side=relaxation_side,
                    persistent_solvers=persistent_solvers,
                    use_linear_relaxation=use_linear_relaxation,
                    large_eval_tol=large_eval_tol)
    self._xs = list(identify_variables(f_x_expr, include_fixed=False))
    self._aux_var_ref.set_component(aux_var)
    self._f_x_expr = f_x_expr

    lb_oa_pt = pe.ComponentMap()
    ub_oa_pt = pe.ComponentMap()
    should_use_lb_oa_pt = True
    should_use_ub_oa_pt = True
    for v in self._xs:
        lb, ub = tuple(_get_bnds_list(v))
        if lb <= -math.inf:
            should_use_lb_oa_pt = False
        else:
            lb_oa_pt[v] = lb
        if ub >= math.inf:
            should_use_ub_oa_pt = False
        else:
            ub_oa_pt[v] = ub
    if should_use_lb_oa_pt:
        self.add_oa_point(var_values=lb_oa_pt)
    if should_use_ub_oa_pt:
        self.add_oa_point(var_values=ub_oa_pt)

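# Hedged usage sketch for set_input above (not taken from the source): relax the
# convex function f(x1, x2) = x1**2 + x2**2 with coramin's MultivariateRelaxation.
# The import paths and the FunctionShape location are assumptions and may differ
# between coramin versions.
import pyomo.environ as pe
import coramin

m = pe.ConcreteModel()
m.x1 = pe.Var(bounds=(-2, 2))
m.x2 = pe.Var(bounds=(-2, 2))
m.z = pe.Var()
m.rel = coramin.relaxations.MultivariateRelaxation()
m.rel.set_input(
    aux_var=m.z,
    shape=coramin.utils.FunctionShape.CONVEX,
    f_x_expr=m.x1**2 + m.x2**2,
)
m.rel.rebuild()  # generates the outer-approximation cuts at the recorded OA points
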
def test_push_and_pop_partitions_2(self):
    m = pe.ConcreteModel()
    m.x = pe.Var(bounds=(-1, 1))
    m.y = pe.Var()
    m.c = coramin.relaxations.PWXSquaredRelaxation()
    m.c.build(x=m.x, aux_var=m.y)
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 1])]))
    m.x.setlb(0)
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [0, 1])]))
    m.x.setlb(-1)
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 1])]))
    m.x.value = 0.5
    m.c.add_partition_point()
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 0.5, 1])]))
    m.x.setlb(0)
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [0, 0.5, 1])]))
    m.x.setlb(-1)
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 0.5, 1])]))
    m.x.setub(0)
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 0])]))
    m.x.setub(1)
    m.c.rebuild()
    self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 1])]))

def relax_expression(model, expr, relaxation_side, data):
    relaxation_side_map = pe.ComponentMap()
    relaxation_side_map[expr] = relaxation_side
    expr = _relax_expr(expr=expr, aux_var_map=data.aux_var_map, parent_block=model,
                       relaxation_side_map=relaxation_side_map, counter=data.counter,
                       degree_map=data.degree_map)
    return expr

def _solution_from_tree(self, problem, tree):
    nodes_visited = tree.nodes_visited

    if len(tree.solution_pool) == 0:
        # Return lower bound only
        optimal_vars = pe.ComponentMap(
            (v, pe.value(v))
            for v in problem.component_data_objects(pe.Var, active=True))
        return BabSolution(
            BabStatusInterrupted(),
            None,
            optimal_vars,
            dual_bound=tree.state.lower_bound,
            nodes_visited=nodes_visited,
        )

    primal_solution = tree.solution_pool.head

    return BabSolution(
        primal_solution.status,
        primal_solution.objective,
        primal_solution.variables,
        dual_bound=tree.state.lower_bound,
        nodes_visited=nodes_visited,
        nodes_remaining=len(tree.open_nodes),
        is_timeout=self.galini.timelimit.timeout(),
        has_converged=self.has_converged(tree.state),
        node_limit_exceeded=self.node_limit_exceeded(tree.state),
    )

def add_oa_point(self, var_values=None):
    """
    Add a point at which an outer-approximation cut for a convex constraint should
    be added. This does not rebuild the relaxation. You must call rebuild() for the
    constraint to get added.

    Parameters
    ----------
    var_values: pe.ComponentMap
    """
    if var_values is None:
        var_values = pe.ComponentMap()
        for v in self.get_rhs_vars():
            var_values[v] = v.value
    else:
        var_values = pe.ComponentMap(var_values)
    self._oa_points.append(var_values)

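# Hedged sketch of how add_oa_point is typically used (mirrors the test code in
# this section): record an OA point, then call rebuild() so the cut is generated.
import pyomo.environ as pe
import coramin

m = pe.ConcreteModel()
m.x = pe.Var(bounds=(-1, 1))
m.y = pe.Var()
m.rel = coramin.relaxations.PWXSquaredRelaxation()
m.rel.build(x=m.x, aux_var=m.y)
m.rel.add_oa_point(var_values=pe.ComponentMap([(m.x, 0.5)]))
m.rel.rebuild()  # the OA cut at x = 0.5 is only added here
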
def test_adjacency_matrix(galini, problem):
    linear_model, _, _ = relax(problem)
    galini.timelimit.start_now()
    triangle_cuts_gen = TriangleCutsGenerator(
        galini, galini._config.cuts_generator.triangle)
    triangle_cuts_gen.before_start_at_root(problem, linear_model)

    lower_bounds, upper_bounds, domains, aux_vars, var_by_id, edges = \
        triangle_cuts_gen._detect_bilinear_terms(linear_model)

    expected_adj = [[1, 1, 1, 0, 0, 1, 0, 1],
                    [1, 0, 1, 1, 1, 0, 1, 1],
                    [1, 1, 1, 1, 0, 1, 1, 1],
                    [0, 1, 1, 1, 1, 1, 1, 1],
                    [0, 1, 0, 1, 0, 1, 1, 1],
                    [1, 0, 1, 1, 1, 1, 1, 1],
                    [0, 1, 1, 1, 1, 1, 0, 0],
                    [1, 1, 1, 1, 1, 1, 0, 1]]

    var_to_idx = aml.ComponentMap()
    for i in problem.I:
        x = linear_model.x[i]
        var_to_idx[x] = i

    for (x_id, y_id) in edges:
        x = var_by_id[x_id]
        y = var_by_id[y_id]
        x_idx = var_to_idx[x]
        y_idx = var_to_idx[y]
        assert expected_adj[x_idx][y_idx] == 1
        # Mark edge as visited
        expected_adj[x_idx][y_idx] = 0

    # Check we visited all edges
    assert np.all(np.isclose(expected_adj, 0))

def set_input(self, master_vars, tol=1e-6, comm=None):
    """
    It is very important for master_vars to be in the same order for every process.

    Parameters
    ----------
    master_vars
    tol
    """
    self.comm = None
    if comm is not None:
        self.comm = comm
    else:
        self.comm = MPI.COMM_WORLD
    self.num_subproblems_by_rank = np.zeros(self.comm.Get_size())
    del self.cuts
    self.cuts = pe.ConstraintList()
    self.subproblems = list()
    self.master_etas = list()
    self.complicating_vars_maps = list()
    self.master_vars = list(master_vars)
    self.master_vars_indices = pe.ComponentMap()
    for i, v in enumerate(self.master_vars):
        self.master_vars_indices[v] = i
    self.tol = tol
    self.subproblem_solvers = list()
    self.all_master_etas = list()
    self._subproblem_ndx_map = dict()

def solve(self, model, linear_model, mip_solution, tree, node):
    assert mip_solution.status.is_success(), \
        "Should be a feasible point for the relaxation"

    model_var_map = node.storage.model_to_relaxation_var_map
    mip_solution_with_model_vars = pe.ComponentMap(
        (var, mip_solution.variables[model_var_map[var]])
        for var in model.component_data_objects(pe.Var, active=True)
    )

    self.algorithm._update_solver_options(self.algorithm._nlp_solver)
    primal_solution = solve_primal_with_starting_point(
        model, mip_solution_with_model_vars, self.algorithm._nlp_solver,
        self.galini.mc, fix_all=True
    )
    if primal_solution is not None and primal_solution.status.is_success():
        return primal_solution

    self.algorithm._update_solver_options(self.algorithm._nlp_solver)
    new_primal_solution = solve_primal(
        model, mip_solution_with_model_vars, self.algorithm._nlp_solver,
        self.galini.mc
    )
    if new_primal_solution is not None:
        primal_solution = new_primal_solution

    return primal_solution

def push_oa_points(self, key=None):
    """
    Save the current list of OA points for later use through pop_oa_points().
    """
    to_save = [pe.ComponentMap(i) for i in self._oa_points]
    if key is not None:
        self._oa_stack_map[key] = to_save
    else:
        self._saved_oa_points.append(to_save)

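# Hedged sketch continuing the add_oa_point example above. It assumes a matching
# pop_oa_points(key=...) as referenced in the docstring; save the OA points before
# temporarily tightening a bound, then restore them afterwards.
m.rel.push_oa_points(key='root')
m.x.setlb(0)
m.rel.rebuild()
# ... work with the tightened relaxation ...
m.x.setlb(-1)
m.rel.pop_oa_points(key='root')
m.rel.rebuild()
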
def attach_nonant_var_map(self, scenario_name):
    instance = self.local_scenarios[scenario_name]

    subproblem_to_root_vars_map = pyo.ComponentMap()
    for var, rvar in zip(instance._mpisppy_data.nonant_indices.values(), self.root_vars):
        if var.name not in rvar.name:
            raise Exception(
                "Error: Complicating variable mismatch, "
                "sub-problem variables changed order")
        subproblem_to_root_vars_map[var] = rvar

    # this is for interfacing with PH code
    instance._mpisppy_model.subproblem_to_root_vars_map = subproblem_to_root_vars_map

def optimization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.385923
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.4824263e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.544077
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.695177e-1
    data[fs.costing.LCOW] = 1.13407
    data[fs.water_recovery] = 0.386405
    return data

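# Hedged sketch (not from the source) of how these reference ComponentMaps are
# typically consumed in flowsheet tests: solve the model, then compare each
# component's value against the stored reference within a loose tolerance.
# The helper name and tolerance are illustrative assumptions.
import pytest
from pyomo.environ import value

def assert_results_match(data, rel_tol=1e-3):
    for component, expected in data.items():
        assert value(component) == pytest.approx(expected, rel=rel_tol)
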
def simulation_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, 'Liq', 'H2O']] = 0.179331
    data[fs.product.flow_mass_phase_comp[0, 'Liq', 'NaCl']] = 0.286037e-3
    data[fs.disposal.flow_mass_phase_comp[0, 'Liq', 'H2O']] = 0.750668
    data[fs.disposal.flow_mass_phase_comp[0, 'Liq', 'NaCl']] = 0.697139e-1
    data[fs.costing.LCOW] = 1.73465
    data[fs.water_recovery] = 0.179618
    return data

def optimization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.732036
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.446025e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.197967
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.695539e-1
    data[fs.costing.LCOW] = 1.48679
    data[fs.water_recovery] = 0.732481
    return data

def initialization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.351684
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.269335e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.606601
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.695767e-1
    data[fs.costing.LCOW] = 1.66166
    data[fs.water_recovery] = 0.5
    return data

def optimization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.732053
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.454317e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.197952
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.695457e-1
    data[fs.costing.LCOW] = 1.18849
    data[fs.water_recovery] = 0.732504
    return data

def simulation_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.296269
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.274578e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.633730
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.697254e-1
    data[fs.costing.LCOW] = 1.73385
    data[fs.water_recovery] = 0.296544
    return data

def initialization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.298029
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.274413e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.633927
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.697161e-1
    data[fs.costing.LCOW] = 1.66663
    data[fs.water_recovery] = 0.5
    return data

def optimization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, 'Liq', 'H2O']] = 0.732053
    data[fs.product.flow_mass_phase_comp[0, 'Liq', 'NaCl']] = 0.456208e-3
    data[fs.disposal.flow_mass_phase_comp[0, 'Liq', 'H2O']] = 0.197952
    data[fs.disposal.flow_mass_phase_comp[0, 'Liq', 'NaCl']] = 0.695438e-1
    data[fs.costing.LCOW] = 1.17018
    data[fs.water_recovery] = 0.732504
    return data

def simulation_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.179331
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.286037e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.750668
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.697139e-1
    data[fs.costing.LCOW] = 2.00482
    data[fs.water_recovery] = 0.179618
    return data

def optimization_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, 'Liq', 'H2O']] = 0.732036
    data[fs.product.flow_mass_phase_comp[0, 'Liq', 'NaCl']] = 0.448848e-3
    data[fs.disposal.flow_mass_phase_comp[0, 'Liq', 'H2O']] = 0.197967
    data[fs.disposal.flow_mass_phase_comp[0, 'Liq', 'NaCl']] = 0.695512e-1
    data[fs.costing.LCOW] = 1.46933
    data[fs.water_recovery] = 0.732481
    return data

def simulation_data(self, model):
    data = pyo.ComponentMap()
    fs = model.fs
    data[fs.product.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.329390
    data[fs.product.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.271454e-3
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "H2O"]] = 0.600609
    data[fs.disposal.flow_mass_phase_comp[0, "Liq", "NaCl"]] = 0.697285e-1
    data[fs.costing.LCOW] = 1.86314
    data[fs.water_recovery] = 0.329661
    return data

def create_subproblem(root):
    m = pyo.ConcreteModel()
    m.x1 = pyo.Var()
    m.x2 = pyo.Var()
    m.y = pyo.Var()
    m.obj = pyo.Objective(expr=-m.x2)
    m.c1 = pyo.Constraint(expr=(m.x1 - 1)**2 + m.x2**2 <= pyo.log(m.y))
    m.c2 = pyo.Constraint(expr=(m.x1 + 1)**2 + m.x2**2 <= pyo.log(m.y))

    complicating_vars_map = pyo.ComponentMap()
    complicating_vars_map[root.y] = m.y

    return m, complicating_vars_map

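# Hedged master-side sketch for the subproblem constructor above, following the
# usual pyomo.contrib.benders usage pattern; the add_subproblem/generate_cut
# method names, the solver choices, and the iteration limit are assumptions if
# your version differs.
import pyomo.environ as pyo
from pyomo.contrib.benders.benders_cuts import BendersCutGenerator

root = pyo.ConcreteModel()
root.y = pyo.Var(bounds=(1, None))          # keeps log(y) well defined in the subproblem
root.eta = pyo.Var(bounds=(-10, None))      # under-estimator of the subproblem objective
root.obj = pyo.Objective(expr=root.y + root.eta)
root.benders = BendersCutGenerator()
root.benders.set_input(master_vars=[root.y], tol=1e-8)
root.benders.add_subproblem(
    subproblem_fn=create_subproblem,
    subproblem_fn_kwargs={'root': root},
    master_eta=root.eta,
    subproblem_solver='ipopt',
)
opt = pyo.SolverFactory('gurobi')
for _ in range(30):
    opt.solve(root)
    cuts_added = root.benders.generate_cut()
    if len(cuts_added) == 0:  # no violated cuts: master and subproblem agree
        break
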
def create_subproblem(root, farmer, scenario):
    m = pyo.ConcreteModel()
    m.crops = pyo.Set(initialize=farmer.crops, ordered=True)
    m.devoted_acreage = pyo.Var(m.crops)
    m.QuantitySubQuotaSold = pyo.Var(m.crops, bounds=(0.0, None))
    m.QuantitySuperQuotaSold = pyo.Var(m.crops, bounds=(0.0, None))
    m.QuantityPurchased = pyo.Var(m.crops, bounds=(0.0, None))

    def EnforceCattleFeedRequirement_rule(m, i):
        return (farmer.CattleFeedRequirement[i] <=
                (farmer.crop_yield[scenario][i] * m.devoted_acreage[i]) +
                m.QuantityPurchased[i] - m.QuantitySubQuotaSold[i] -
                m.QuantitySuperQuotaSold[i])
    m.EnforceCattleFeedRequirement = pyo.Constraint(
        m.crops, rule=EnforceCattleFeedRequirement_rule)

    def LimitAmountSold_rule(m, i):
        return (m.QuantitySubQuotaSold[i] + m.QuantitySuperQuotaSold[i] -
                (farmer.crop_yield[scenario][i] * m.devoted_acreage[i]) <= 0.0)
    m.LimitAmountSold = pyo.Constraint(m.crops, rule=LimitAmountSold_rule)

    def EnforceQuotas_rule(m, i):
        return (0.0, m.QuantitySubQuotaSold[i], farmer.PriceQuota[i])
    m.EnforceQuotas = pyo.Constraint(m.crops, rule=EnforceQuotas_rule)

    obj_expr = sum(farmer.PurchasePrice[crop] * m.QuantityPurchased[crop]
                   for crop in m.crops)
    obj_expr -= sum(farmer.SubQuotaSellingPrice[crop] * m.QuantitySubQuotaSold[crop]
                    for crop in m.crops)
    obj_expr -= sum(farmer.SuperQuotaSellingPrice[crop] * m.QuantitySuperQuotaSold[crop]
                    for crop in m.crops)
    m.obj = pyo.Objective(expr=farmer.scenario_probabilities[scenario] * obj_expr)

    complicating_vars_map = pyo.ComponentMap()
    for crop in m.crops:
        complicating_vars_map[root.devoted_acreage[crop]] = m.devoted_acreage[crop]

    return m, complicating_vars_map

def branch_at_point(model, current_bounds, branching_point, mc):
    """Branch problem at branching_point, returning a list of child problems."""
    # Note: the incoming current_bounds argument is replaced by bounds read
    # directly from the model's variables below.
    current_bounds = pe.ComponentMap()
    for var in model.component_data_objects(pe.Var, active=True, descend_into=True):
        var_lb = var.lb
        var_ub = var.ub
        if var_lb is None:
            var_lb = -np.inf
        if var_ub is None:
            var_ub = np.inf
        current_bounds[var] = (var_lb, var_ub)

    var = branching_point.variable
    var_lb, var_ub = current_bounds[var]

    epsilon = mc.epsilon
    for point in branching_point.points:
        is_less_than_ub = almost_le(point, var_ub, atol=epsilon)
        is_greater_than_lb = almost_ge(point, var_lb, atol=epsilon)
        if not is_less_than_ub or not is_greater_than_lb:
            raise RuntimeError(
                'Branching outside variable bounds: {} in [{}, {}], branching at {}'
                .format(var.name, var.lb, var.ub, point))

    children = []
    new_upper_bound = var_lb
    is_integer = var.is_integer() or var.is_binary()
    for point in branching_point.points:
        new_lower_bound = new_upper_bound
        new_upper_bound = point
        var_lower_bound = \
            np.ceil(new_lower_bound) if is_integer else new_lower_bound
        var_upper_bound = \
            np.floor(new_upper_bound) if is_integer else new_upper_bound
        child = copy.copy(current_bounds)
        child[var] = (var_lower_bound, var_upper_bound)
        children.append(child)

    var_lower_bound = \
        np.ceil(new_upper_bound) if is_integer else new_upper_bound
    var_upper_bound = \
        np.floor(var_ub) if is_integer else var_ub
    child = copy.copy(current_bounds)
    child[var] = (var_lower_bound, var_upper_bound)
    children.append(child)

    return children

def _cache_and_set_relaxed_bounds(self, bound_relax_factor):
    self._bound_cache = pyo.ComponentMap()
    val = pyo.value
    for v in self._model.component_data_objects(
            pyo.Var, active=True, descend_into=True):
        # we could hit a variable more than once because of References
        if v in self._bound_cache:
            continue
        if v.lb is None and v.ub is None:
            continue
        self._bound_cache[v] = (v.lb, v.ub)
        sf = get_scaling_factor(v, default=1)
        if v.lb is not None:
            v.lb = val((v.lb*sf - bound_relax_factor*max(1, abs(val(v.lb*sf))))/sf)
        if v.ub is not None:
            v.ub = val((v.ub*sf + bound_relax_factor*max(1, abs(val(v.ub*sf))))/sf)

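# Worked illustration of the relaxation formula above (assumed numbers, scaling
# factor sf = 1): with lb = 2.0 and bound_relax_factor = 1e-8, the relaxed lower
# bound becomes 2.0 - 1e-8 * max(1, |2.0|) = 2.0 - 2e-8; an upper bound of 0.5
# becomes 0.5 + 1e-8 * max(1, |0.5|) = 0.5 + 1e-8. The original bounds are kept
# in self._bound_cache so they can be restored later.
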
def _check_if_problem_is_nolinear(self, relaxed_problem):
    self._convex_relaxations_map = pe.ComponentMap()
    for relaxation in relaxed_problem.galini_nonlinear_relaxations:
        is_convex = (
            relaxation.is_rhs_convex() and
            relaxation.relaxation_side in [RelaxationSide.BOTH, RelaxationSide.UNDER]
        ) or (
            relaxation.is_rhs_concave() and
            relaxation.relaxation_side in [RelaxationSide.BOTH, RelaxationSide.OVER]
        )
        if is_convex:
            rhs_expr = relaxation.get_rhs_expr()
            rhs_vars = relaxation.get_rhs_vars()
            aux_var = relaxation.get_aux_var()
            rel_expr = rhs_expr - aux_var
            rel_vars = rhs_vars + [aux_var]
            self._convex_relaxations_map[relaxation] = (rel_expr, rel_vars)

def __init__(self, component):
    if not mpi4py_available:
        raise ImportError('BendersCutGenerator requires mpi4py.')
    if not numpy_available:
        raise ImportError('BendersCutGenerator requires numpy.')
    _BlockData.__init__(self, component)
    self.num_subproblems_by_rank = np.zeros(MPI.COMM_WORLD.Get_size())
    self.subproblems = list()
    self.complicating_vars_maps = list()
    self.master_vars = list()
    self.master_vars_indices = pe.ComponentMap()
    self.master_etas = list()
    self.cuts = None
    self.subproblem_solvers = list()
    self.tol = None
    self.all_master_etas = list()

def solution_pool_from_solver(solver):
    if not isinstance(solver, CPLEXDirect):
        return None
    solver_model = solver._solver_model
    solver_pool = solver_model.solution.pool
    num_sol = solver_pool.get_num()
    if not num_sol:
        return None
    pool = []
    var_map = solver._pyomo_var_to_ndx_map
    for i in range(num_sol):
        values = solver_pool.get_values(i)
        vars_to_load = var_map.keys()
        sol = pe.ComponentMap(
            (var, value) for var, value in zip(vars_to_load, values))
        pool.append(sol)
    return pool

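# Hedged usage sketch (not from the source): solve a MIP through the cplex_direct
# interface, assuming it keeps its internal CPLEX model and variable map available
# after solve, then load one pool solution back into the Pyomo model.
import pyomo.environ as pe

opt = pe.SolverFactory('cplex_direct')
opt.solve(model)  # `model` is any Pyomo MIP already built elsewhere
pool = solution_pool_from_solver(opt)  # None unless solver is CPLEXDirect with a non-empty pool
if pool is not None:
    for var, val in pool[0].items():
        var.set_value(val)
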
def __init__(self, component):
    if not mpi4py_available:
        raise ImportError('BendersCutGenerator requires mpi4py.')
    if not numpy_available:
        raise ImportError('BendersCutGenerator requires numpy.')
    _BlockData.__init__(self, component)
    self.num_subproblems_by_rank = 0  # np.zeros(self.comm.Get_size())
    self.subproblems = list()
    self.complicating_vars_maps = list()
    self.master_vars = list()
    self.master_vars_indices = pe.ComponentMap()
    self.master_etas = list()
    self.cuts = None
    self.subproblem_solvers = list()
    self.tol = None
    self.all_master_etas = list()
    # map from ndx in self.subproblems (local) to the global subproblem ndx
    self._subproblem_ndx_map = dict()