Example 1
from gurobipy import LinExpr, Var


def constraints_list_of_tuples(model, mylist, sign="="):
    """Add one scalar constraint per entry of sum_i A_i @ B_i <sign> 0.

    Each element of mylist is a tuple (A, B) of 2-D arrays whose matrix
    product contributes to the left-hand side; at most one factor of each
    term may contain Gurobi variables.
    """
    term_0 = mylist[0]
    ROWS, COLUMNS = term_0[0].shape[0], term_0[1].shape[1]
    for row in range(ROWS):
        for column in range(COLUMNS):
            expr = LinExpr()
            for term in mylist:
                q, qp = term[0].shape[1], term[1].shape[0]
                if q != qp:
                    raise ValueError(term, "q=%d qp=%d" % (q, qp))
                if isinstance(term[1][0, column], Var):
                    # Variables in the right factor: left factor holds coefficients
                    expr.add(
                        LinExpr([(term[0][row, k], term[1][k, column])
                                 for k in range(q)]))
                elif isinstance(term[0][row, 0], Var):
                    # Variables in the left factor: right factor holds coefficients
                    expr.add(
                        LinExpr([(term[1][k, column], term[0][row, k])
                                 for k in range(q)]))
                else:
                    # Both factors numeric: fold into the constant term
                    expr.addConstant(
                        sum(term[1][k, column] * term[0][row, k]
                            for k in range(q)))
            if sign == "<":
                model.addConstr(expr <= 0)
            elif sign == "=":
                model.addConstr(expr == 0)
            elif sign == ">=":
                model.addConstr(expr >= 0)
            else:
                raise ValueError("unknown sign %r" % sign)
Example 2
import numpy as np
from gurobipy import GRB, LinExpr, Model
from networkx import shortest_path, shortest_path_length


# get_2d_index is assumed to be a project helper that maps the flat arc
# array to 1-based (origin node, destination node) index arrays
def populate_master(data, open_arcs=None):
    """
    Function that populates the Benders Master problem
    :param data:   Problem data structure
    :param open_arcs: If given, it is a MIP start feasible solution
    :rtype:        Gurobi model object
    """
    master = Model('master-model')
    arcs, periods = range(data.arcs.size), range(data.periods)
    commodities = range(data.commodities)
    graph, origins, destinations = data.graph, data.origins, data.destinations
    variables = np.empty(shape=(data.periods, data.arcs.size), dtype=object)
    bin_vars_idx = np.empty_like(variables, dtype=int)
    continuous_variables = np.empty(shape=(len(periods), len(commodities)),
                                    dtype=object)
    cont_vars_idx = np.empty_like(continuous_variables, dtype=int)

    start_given = open_arcs is not None
    count = 0

    # length of shortest path, shortest path itself
    arc_com, arc_obj = [], []
    lbs = [
        shortest_path_length(graph, origins[com], destinations[com], 'weight')
        for com in commodities
    ]
    sps = [
        shortest_path(graph, origins[com], destinations[com], 'weight')
        for com in commodities
    ]
    # Re-route each commodity: penalize one shortest-path arc at a time and
    # record the arc whose removal increases the path length the most
    for com in commodities:
        incr, best_arc = 0., 0
        for n1, n2 in zip(sps[com], sps[com][1:]):
            weight = graph[n1][n2]['weight']
            graph[n1][n2]['weight'] = 10000. * weight
            spl = shortest_path_length(graph, origins[com], destinations[com],
                                       'weight')
            if spl > incr:
                incr = spl
                best_arc = graph[n1][n2]['arc_id']
            graph[n1][n2]['weight'] = weight
        arc_com.append(best_arc)
        # Record the best (largest) re-routed length, not the last one probed
        arc_obj.append(incr)

    # Add variables
    for period in periods:
        for arc in arcs:
            # Binary arc variables
            variables[period, arc] = master.addVar(vtype=GRB.BINARY,
                                                   obj=data.fixed_cost[period,
                                                                       arc],
                                                   name='arc_open{}_{}'.format(
                                                       period, arc))
            bin_vars_idx[period, arc] = count
            count += 1
        for com in commodities:
            lb = lbs[com] * data.demand[period, com]
            # Continuous flow_cost variables (eta)
            continuous_variables[period, com] = master.addVar(
                lb=lb,
                obj=1.,
                vtype=GRB.CONTINUOUS,
                name='flow_cost{}'.format((period, com)))
            cont_vars_idx[period, com] = count
            count += 1
    master.update()

    # If a feasible solution is given, pass it to the solver as variable
    # hints (VarHintVal/VarHintPri guide the search; the commented-out
    # Start attribute would set a full MIP start instead)
    if start_given:
        for period in periods:
            for arc in arcs:
                # variables[period, arc].start = open_arcs[period, arc]
                variables[period, arc].VarHintVal = open_arcs[period, arc]
                variables[period, arc].VarHintPri = 1

    # Add constraints
    # Add Origin - Destination Cuts for each Commodity
    cuts_org, cuts_dest = set(), set()
    for commodity in commodities:
        arc_origin = data.origins[commodity]
        arc_destination = data.destinations[commodity]
        if arc_origin not in cuts_org:
            out_origin = get_2d_index(data.arcs,
                                      data.nodes)[0] - 1 == arc_origin
            master.addConstr(lhs=np.sum(variables[0, out_origin]),
                             rhs=1.,
                             sense=GRB.GREATER_EQUAL,
                             name='origins_c{}'.format(commodity))
            cuts_org.add(arc_origin)
        if arc_destination not in cuts_dest:
            in_dest = get_2d_index(data.arcs,
                                   data.nodes)[1] - 1 == arc_destination
            master.addConstr(lhs=np.sum(variables[0, in_dest]),
                             rhs=1.,
                             sense=GRB.GREATER_EQUAL,
                             name='destinations_c{}'.format(commodity))
            cuts_dest.add(arc_destination)

    # Each arc may open in at most one period (SOS1 over its period copies)
    for arc in arcs:
        master.addSOS(GRB.SOS_TYPE1, variables[:, arc].tolist(),
                      list(periods)[::-1])

    # Strengthening cuts: the flow cost of a commodity must cover its
    # re-routed path cost until its critical arc opens
    for com in commodities:
        arc = arc_com[com]
        base_coeffs = lbs[com] - arc_obj[com]
        for period in periods:
            lhs = LinExpr()
            coeffs = [base_coeffs * data.demand[period, com]] * (period + 1)
            lhs.addTerms(coeffs, variables[:period + 1, arc].tolist())
            lhs.add(-continuous_variables[period, com])
            lhs.addConstant(arc_obj[com] * data.demand[period, com])
            master.addConstr(lhs,
                             sense=GRB.LESS_EQUAL,
                             rhs=0,
                             name='strengthening_{}{}'.format(period, com))

    master.params.LazyConstraints = 1
    master.params.TimeLimit = 7200
    master.params.Threads = 2
    # Branch on the up direction first: tends to find feasible solutions quickly
    master.params.BranchDir = 1
    # Store references on the model itself; Gurobi callbacks cannot access
    # these local variables otherwise
    master._variables = np.array(master.getVars())
    master._cont_vars_idx = cont_vars_idx
    master._bin_vars_idx = bin_vars_idx
    return master
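Because the model enables LazyConstraints and stores its variables and index arrays on the model object, a callback can read incumbent values later. A minimal sketch of that pattern; the cut added here is only a placeholder, since the real Benders cuts come from a subproblem that is not shown:

from gurobipy import GRB, quicksum

def benders_callback(model, where):
    # populate_master stored these arrays on the model; callbacks cannot
    # reach Python locals, which is why the references were attached
    if where == GRB.Callback.MIPSOL:
        bin_vars = model._variables[model._bin_vars_idx].ravel().tolist()
        values = model.cbGetSolution(bin_vars)
        opened = [v for v, val in zip(bin_vars, values) if val > 0.5]
        # Placeholder no-good cut on the current opening pattern; a real
        # Benders optimality cut would be built from the subproblem duals
        model.cbLazy(quicksum(opened) <= len(opened) - 1)

# master = populate_master(data)
# master.optimize(benders_callback)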
Example 3
    def suggest_experiments(
        self, num_experiments=1, prev_res: DataSet = None, **kwargs
    ):
        """Suggest experiments using ENTMOOT tree-based Bayesian Optimization

        Parameters
        ----------
        num_experiments: int, optional
            The number of experiments (i.e., samples) to generate. Default is 1.
        prev_res: :class:`~summit.utils.data.DataSet`, optional
            Dataset with data from previous experiments of the previous iteration.
            If no data is passed, then random sampling will
            be used to suggest an initial design.

        Returns
        -------
        next_experiments : :class:`~summit.utils.data.DataSet`
            A Dataset object with the suggested experiments

        """

        param = None
        xbest = np.zeros(self.domain.num_continuous_dimensions())
        obj = self.domain.output_variables[0]
        objective_dir = -1.0 if obj.maximize else 1.0
        fbest = float("inf")

        bounds = [k["domain"] for k in self.input_domain]

        space = Space(bounds)
        core_model = get_core_gurobi_model(space)
        gvars = core_model.getVars()

        # Each entry of self.constraints is a tuple
        # (coefficients, constant, sense): coefficients @ x + constant <sense> 0
        for c in self.constraints:
            left = LinExpr()
            left.addTerms(c[0], gvars)
            left.addConstant(c[1])
            core_model.addLConstr(left, c[2], 0)

        core_model.update()

        entmoot_model = Optimizer(
            dimensions=bounds,
            base_estimator=self.estimator_type,
            std_estimator=self.std_estimator_type,
            n_initial_points=self.initial_points,
            initial_point_generator=self.generator_type,
            acq_func=self.acquisition_type,
            acq_optimizer=self.optimizer_type,
            random_state=None,
            acq_func_kwargs=None,
            acq_optimizer_kwargs={"add_model_core": core_model},
            base_estimator_kwargs={"min_child_samples": self.min_child_samples},
            std_estimator_kwargs=None,
            model_queue_size=None,
            verbose=False,
        )

        # If we have previous results:
        if prev_res is not None:
            # Get inputs and outputs
            inputs, outputs = self.transform.transform_inputs_outputs(
                prev_res, transform_descriptors=self.use_descriptors
            )

            # Convert maximization objectives to minimization by flipping their sign
            for v in self.domain.variables:
                if v.is_objective and v.maximize:
                    outputs[v.name] = -1 * outputs[v.name]
                if isinstance(v, CategoricalVariable):
                    if not self.use_descriptors:
                        inputs[v.name] = self.categorical_wrapper(
                            inputs[v.name], v.levels
                        )

            inputs = inputs.to_numpy()
            outputs = outputs.to_numpy()

            if self.prev_param is not None:
                X_step = self.prev_param[0]
                Y_step = self.prev_param[1]

                X_step = np.vstack((X_step, inputs))
                Y_step = np.vstack((Y_step, outputs))

            else:
                X_step = inputs
                Y_step = outputs
            # Convert to list form to give to optimizer
            prev_X = [list(x) for x in X_step]
            prev_y = [y for x in Y_step for y in x]

            # Train entmoot model
            entmoot_model.tell(prev_X, prev_y, fit=True)

            # Store parameters (history of suggested points and function evaluations)
            param = [X_step, Y_step]
            fbest = np.min(Y_step)
            xbest = X_step[np.argmin(Y_step)]

        request = np.array(
            entmoot_model.ask(n_points=num_experiments, strategy="cl_mean")
        )
        # Generate DataSet object with the variable values of the next experiments
        next_experiments = None
        transform_descriptors = False
        if request is not None and len(request) != 0:
            next_experiments = {}
            i_inp = 0
            for v in self.domain.variables:
                if not v.is_objective:
                    if isinstance(v, CategoricalVariable):
                        if v.ds is None or not self.use_descriptors:
                            cat_list = [
                                self.categorical_unwrap(entry, v.levels)
                                for entry in request[:, i_inp]
                            ]
                            next_experiments[v.name] = np.asarray(cat_list)
                            i_inp += 1
                        else:
                            descriptor_names = v.ds.data_columns
                            for d in descriptor_names:
                                next_experiments[d] = request[:, i_inp]
                                i_inp += 1
                            transform_descriptors = True
                    else:
                        next_experiments[v.name] = request[:, i_inp]
                        i_inp += 1
            next_experiments = DataSet.from_df(pd.DataFrame(data=next_experiments))
            next_experiments[("strategy", "METADATA")] = "ENTMOOT"

        self.fbest = objective_dir * fbest
        self.xbest = xbest
        self.prev_param = param

        # Do any necessary transformation back
        next_experiments = self.transform.un_transform(
            next_experiments, transform_descriptors=self.use_descriptors
        )

        return next_experiments
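For reference, a self-contained sketch of the (coefficients, constant, sense) tuple format that the constraint loop above consumes; the bounds and coefficients are illustrative, and a plain Gurobi model stands in for the ENTMOOT core model:

from gurobipy import GRB, LinExpr, Model

# Hypothetical constraint x0 + 2*x1 - 3 <= 0 in the expected tuple format
constraints = [([1.0, 2.0], -3.0, GRB.LESS_EQUAL)]

model = Model()
gvars = [model.addVar(lb=0.0, ub=5.0, name="x{}".format(i)) for i in range(2)]
model.update()

for c in constraints:
    left = LinExpr()
    left.addTerms(c[0], gvars)
    left.addConstant(c[1])
    model.addLConstr(left, c[2], 0)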