def robust_utility(item, answered_queries, verbose=False, gamma_inconsistencies=0.0):
    """Return the item's minimum utility over the uncertainty set U generated by
    answered_queries, together with the u-vector achieving this minimum.

    Args:
        item: an Item object; item.features is the feature vector being valued.
        answered_queries: list of answered Query objects defining the u-set.
        verbose: if True, emit a warning when the u-set model is infeasible.
        gamma_inconsistencies: (float) agent-inconsistency budget; enlarges U.

    Returns:
        (min_utility, u_vector) on success, or (None, None) if the uncertainty
        set is empty (model infeasible).
    """
    import warnings

    num_features = len(item.features)

    # create the u-set model
    m = create_mip_model()

    u_vars = u_set_model(
        answered_queries, num_features, m, gamma_inconsistencies=gamma_inconsistencies
    )

    # objective: minimize the item's valuation u . features over u in U
    obj_expr = quicksum([u_vars[i] * item.features[i] for i in range(num_features)])

    m.setObjective(obj_expr, sense=GRB.MINIMIZE)

    m.optimize()

    # check for infeasibility (empty uncertainty set)
    if m.status == GRB.INFEASIBLE:
        if verbose:
            # BUGFIX: previously `raise Warning(...)`, which crashed the caller
            # and made the (None, None) return unreachable when verbose=True.
            warnings.warn("agent utility model is infeasible")
        return None, None

    # return the objective value and the minimizing u-vector
    return m.ObjVal, [var.x for var in u_vars.values()]
# 示例#2
# 0
def create_picef_model(cfg):
    """Build the PICEF formulation MIP for the instance described by *cfg*.

    Args:
        cfg: an OptConfig object (digraph, ndds, cycle/chain caps, solver settings)

    Returns:
        a tuple ``(m, cycles, cycle_vars, num_edges_var)``: the Gurobi model,
        the list of candidate cycles, one binary variable per cycle, and an
        integer variable counting edges used in the matching.
    """

    cycles = cfg.digraph.find_cycles(cfg.max_cycle)

    m = create_mip_model(time_lim=cfg.timelimit, verbose=cfg.verbose)
    m.params.method = -1

    # one binary selection variable per candidate cycle
    cycle_vars = [m.addVar(vtype=GRB.BINARY) for _ in cycles]

    # vtx_to_vars[v] collects every variable whose selection would use vertex v
    vtx_to_vars = [[] for _ in cfg.digraph.vs]

    add_chain_vars_and_constraints(
        cfg.digraph,
        cfg.ndds,
        cfg.use_chains,
        cfg.max_chain,
        m,
        vtx_to_vars,
        store_edge_positions=cfg.edge_success_prob != 1)

    for cycle, cyc_var in zip(cycles, cycle_vars):
        for vtx in cycle:
            vtx_to_vars[vtx.id].append(cyc_var)

    # each vertex may participate in at most one selected cycle/chain
    for vars_at_vtx in vtx_to_vars:
        if vars_at_vtx:
            m.addConstr(quicksum(vars_at_vtx) <= 1)

    # add variables for each pair-pair edge indicating whether it is used in a cycle or chain
    for e in cfg.digraph.es:
        cycle_uses = [
            cyc_var
            for cyc_var, cycle in zip(cycle_vars, cycles)
            if kidney_utils.cycle_contains_edge(cycle, e)
        ]
        used_var = m.addVar(vtype=GRB.INTEGER)
        if cfg.use_chains:
            m.addConstr(used_var == quicksum(cycle_uses) + quicksum(e.grb_vars))
        else:
            m.addConstr(used_var == quicksum(cycle_uses))
        e.used_var = used_var

    # total number of edges in the matching
    num_edges_var = m.addVar(vtype=GRB.INTEGER)
    pair_edge_count = [e.used_var for e in cfg.digraph.es]
    if cfg.use_chains:
        ndd_edge_count = [e.edge_var for ndd in cfg.ndds for e in ndd.edges]
        m.addConstr(num_edges_var == quicksum(pair_edge_count + ndd_edge_count))
    else:
        m.addConstr(num_edges_var == quicksum(pair_edge_count))

    # optional cap on matching cardinality
    if cfg.cardinality_restriction is not None:
        m.addConstr(num_edges_var <= cfg.cardinality_restriction)

    m.update()

    return m, cycles, cycle_vars, num_edges_var
def static_mip_optimal(
    items,
    K,
    valid_responses,
    time_lim=TIME_LIM,
    cut_1=True,
    cut_2=True,
    start_queries=None,
    fixed_queries=None,
    fixed_responses=None,
    start_rec=None,
    subproblem_list=None,
    displayinterval=None,
    gamma_inconsistencies=0.0,
    problem_type="maximin",
    raise_gurobi_time_limit=True,
    log_problem_size=False,
    logger=None,
    u0_type="box",
    artificial_bounds=False,
):
    """
    finds the robust-optimal query set, given a set of items.

    input:
    - items : a list of Item objects
    - K : the number of queries to be selected
    - start_queries : list of K queries to use as a warm start. do not need to be sorted.
    - fixed_queries : list of queries to FIX. length of this list must be <=K. these are fixed as the FIRST queries (order is arbitrary anyhow)
    - fixed_responses : list of responses for FIX, for the first n <= K queries. (alternative to using arg response_subset)
    - cut_1 : (bool) use cut restricting values of p and q (p < q)
    - cut_2 : (bool) use cut restricting order of queries (lexicographical order of (p,q) pairs)
    - valid_responses : list of ints, either [1, -1, 0] (indifference) or [1, -1] (no indifference)
    - response_subset : subset of scenarios S, where S[i] is a list of ints {-1, 0, 1}, of len K
    - logfile: if specified, write a gurobi logfile at this path
    - gamma_inconsistencies: (float). assumed upper bound of agent inconsistencies. increasing gamma increases the
        size of the uncertainty set
    - problem_type : (str). either 'maximin' or 'mmr'. if maximin, solve the maximin robust recommendation
        problem. if mmr, solve the minimax regret problem.

    output:
    - query_list : a list of Query objects
    - start_rec : dict where keys are response scenarios, values are indices of recommended item
    """

    if fixed_queries is None:
        fixed_queries = []
    assert problem_type in ["maximin", "mmr"]

    # indifference responses not supported
    assert set(valid_responses) == {-1, 1}

    # number of features for each item
    num_features = len(items[0].features)

    # polyhedral definition for U^0, B_mat and b_vec
    B_mat, b_vec = get_u0(u0_type, num_features)

    # number of items
    num_items = len(items)

    # lambda variables (dual variables for the initial uncertainty set):
    # lam_vars[r,i] is the i^th dual variable (for i = 1,...,m_const) for the r^th response scenario
    # recall: B_mat (m_const x n), and b_vec (m_const x 1)
    m_const = len(b_vec)
    assert B_mat.shape == (m_const, num_features)

    # get the logfile from the logger, if there is one
    if logger is not None:
        log_file = logger.handlers[0].baseFilename
    else:
        log_file = None

    # define the mip model
    m = create_mip_model(time_lim=time_lim,
                         log_file=log_file,
                         displayinterval=displayinterval)

    # the objective
    tau = m.addVar(vtype=GRB.CONTINUOUS,
                   lb=-GRB.INFINITY,
                   ub=GRB.INFINITY,
                   name="tau")

    if problem_type == "maximin":
        m.setObjective(tau, sense=GRB.MAXIMIZE)
        if artificial_bounds:
            # artificial objective bound, to detect a likely-unbounded model later
            obj_bound = 1000
            m.addConstr(tau <= obj_bound, name="artificial_obj_bound")
    if problem_type == "mmr":
        m.setObjective(tau, sense=GRB.MINIMIZE)
        # artificial objective bound
        obj_bound = -1000
        m.addConstr(tau >= obj_bound, name="artificial_obj_bound")

    # all possible agent response scenarios
    if subproblem_list is None:
        # each subproblem is a single response scenario
        scenario_list = list(itertools.product(valid_responses, repeat=K))
        num_scenarios = int(np.power(len(valid_responses), K))
        assert num_scenarios == len(scenario_list)
    else:
        # each subproblem should be a single response scenario
        # assert that every response in the subset is a valid response
        for r in subproblem_list:
            assert set(r).difference(set(valid_responses)) == set([])
        scenario_list = subproblem_list

    if fixed_responses is not None:
        # assert subproblem_list is None
        # f = len(fixed_responses)
        # t = tuple(fixed_responses)
        # assert f <= K
        # r_list = list(r for r in itertools.product(valid_responses, repeat=K) if r[:f] == t)
        # BUGFIX: `raise NotImplemented(...)` raised the NotImplemented
        # sentinel (a TypeError at runtime), not an exception class.
        raise NotImplementedError("not implemented")

    # define integer variables - this is the same for both MMR and maximin problem types
    p_vars, q_vars, w_vars = add_integer_variables(
        m,
        num_items,
        K,
        start_queries=start_queries,
        cut_1=cut_1,
        cut_2=cut_2,
        fixed_queries=fixed_queries,
    )

    # now add continuous variables for each response scenario
    if problem_type == "maximin":
        y_vars = {}
        alpha_vars = {}
        beta_vars = {}
        v_bar_vars = {}
        w_bar_vars = {}
        for i, r in enumerate(scenario_list):
            (
                alpha_vars[r],
                beta_vars[r],
                v_bar_vars[r],
                w_bar_vars[r],
            ) = add_r_constraints(
                m,
                tau,
                p_vars,
                q_vars,
                K,
                r,
                i,
                m_const,
                items,
                num_items,
                num_features,
                B_mat,
                b_vec,
                y_vars=y_vars,
                problem_type=problem_type,
                fixed_queries=fixed_queries,
                gamma_inconsistencies=gamma_inconsistencies,
            )

    if problem_type == "mmr":
        # store y_vars for each scenario; mmr needs one dual block per (scenario, item)
        y_vars = {}
        alpha_vars = {}
        beta_vars = {}
        v_bar_vars = {}
        w_bar_vars = {}
        for i, r in enumerate(scenario_list):
            for item in items:
                (
                    alpha_vars[r, item.id],
                    beta_vars[r, item.id],
                    v_bar_vars[r, item.id],
                    w_bar_vars[r, item.id],
                ) = add_r_constraints(
                    m,
                    tau,
                    p_vars,
                    q_vars,
                    K,
                    r,
                    i,
                    m_const,
                    items,
                    num_items,
                    num_features,
                    B_mat,
                    b_vec,
                    y_vars=y_vars,
                    problem_type=problem_type,
                    mmr_item=item,
                    fixed_queries=fixed_queries,
                    gamma_inconsistencies=gamma_inconsistencies,
                )

    m.update()

    if log_problem_size and logger is not None:
        logger.info(f"total variables: {m.numvars}")
        logger.info(f"total constraints: {m.numconstrs}")

    # m.params.DualReductions = 0
    try:
        optimize(m, raise_warnings=False)
    except GurobiTimeLimit:
        if raise_gurobi_time_limit:
            # re-raise the caught exception instance rather than a fresh one
            raise

    if m.status == GRB.TIME_LIMIT:
        time_limit_reached = True
    else:
        time_limit_reached = False

    if artificial_bounds and logger is not None:
        if abs(tau.x - obj_bound) <= 1e-3:
            logger.info(
                f"problem is likely unbounded: tau = obj_bound = {obj_bound}")
    try:
        # get the indices of the optimal queries
        p_inds = [-1 for _ in range(K)]
        q_inds = [-1 for _ in range(K)]
        for k in range(K):
            p_list = [np.round(p_vars[i, k].x) for i in range(num_items)]
            p_inds[k] = int(np.argwhere(p_list))
            q_list = [np.round(q_vars[i, k].x) for i in range(num_items)]
            q_inds[k] = int(np.argwhere(q_list))
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. if variable values are unavailable
        # (e.g. no feasible solution), dump the model for debugging.
        lp_file = generate_filepath(os.getenv("HOME"), "static_milp_problem",
                                    "lp")
        m.write(lp_file)
        if logger is not None:
            logger.info(
                f"static MIP failed, model status = {m.status}, writing LP file to {lp_file}"
            )
        raise StaticMIPFailed

    # get indices of recommended items
    rec_inds = {}
    # for i_r, r in enumerate(r_list):
    #     y_list = [np.round(y_vars[i_r][i].x) for i in range(num_items)]
    #     rec_inds[r] = int(np.argwhere(y_list))

    return (
        [Query(items[p_inds[k]], items[q_inds[k]]) for k in range(K)],
        m.objVal,
        time_limit_reached,
        rec_inds,
    )
# 示例#4
# 0
def create_picef_model(cfg, check_edge_success=False):
    """Build the PICEF formulation model for the instance in cfg.

    Args:
        cfg: an OptConfig object
        check_edge_success: (bool). if True, check if each edge has e.success = False. if e.success=False, the edge cannot
            be used.

    Returns:
        None. NOTE(review): the original docstring claimed "an OptSolution
        object", but this function returns nothing -- it attaches its results
        to cfg instead (cfg.m, cfg.cycles, cfg.cycle_vars, cfg.cycle_list),
        and also sets e.used_var on each pair-pair edge.
    """

    cycles = cfg.digraph.find_cycles(cfg.max_cycle)

    m = create_mip_model(time_lim=cfg.timelimit, verbose=cfg.verbose)
    m.params.method = -1

    # one binary selection variable per candidate cycle
    cycle_vars = [m.addVar(vtype=GRB.BINARY) for __ in cycles]

    # vtx_to_vars[v] collects every variable whose selection uses vertex v
    vtx_to_vars = [[] for __ in cfg.digraph.vs]

    add_chain_vars_and_constraints(
        cfg.digraph,
        cfg.ndds,
        cfg.use_chains,
        cfg.max_chain,
        m,
        vtx_to_vars,
        store_edge_positions=True,
        check_edge_success=check_edge_success,
    )

    for i, c in enumerate(cycles):
        for v in c:
            vtx_to_vars[v.id].append(cycle_vars[i])

    # each vertex participates in at most one cycle/chain
    for l in vtx_to_vars:
        if len(l) > 0:
            m.addConstr(quicksum(l) <= 1)

    # add variables for each pair-pair edge indicating whether it is used in a cycle or chain
    for e in cfg.digraph.es:
        used_in_cycle = []
        for var, c in zip(cycle_vars, cycles):
            if kidney_utils.cycle_contains_edge(c, e):
                used_in_cycle.append(var)

        used_var = m.addVar(vtype=GRB.INTEGER)
        if check_edge_success:
            # failed edges (e.success == False) are forced out of the solution
            if not e.success:
                m.addConstr(used_var == 0)

        if cfg.use_chains:
            m.addConstr(used_var == quicksum(used_in_cycle) +
                        quicksum(e.grb_vars))
        else:
            m.addConstr(used_var == quicksum(used_in_cycle))
        e.used_var = used_var

    # add cycle objects (wrap each raw cycle with its edges, weight, and grb var)
    cycle_list = []
    for c, var in zip(cycles, cycle_vars):
        c_obj = Cycle(c)
        c_obj.add_edges(cfg.digraph.es)
        c_obj.weight = failure_aware_cycle_weight(c_obj.vs, cfg.digraph,
                                                  cfg.edge_success_prob)
        c_obj.grb_var = var
        cycle_list.append(c_obj)

    # add objective: three cases depending on chains and edge success probability.
    # with edge_success_prob < 1, chain edges are discounted by prob**(pos + 1),
    # i.e. by their position along the chain.
    if not cfg.use_chains:
        obj_expr = quicksum(
            failure_aware_cycle_weight(c, cfg.digraph, cfg.edge_success_prob) *
            var for c, var in zip(cycles, cycle_vars))
    elif cfg.edge_success_prob == 1:
        obj_expr = (quicksum(
            cycle_weight(c, cfg.digraph) * var
            for c, var in zip(cycles, cycle_vars)) +
                    quicksum(e.weight * e.edge_var for ndd in cfg.ndds
                             for e in ndd.edges) +
                    quicksum(e.weight * var for e in cfg.digraph.es
                             for var in e.grb_vars))
    else:
        obj_expr = (quicksum(
            failure_aware_cycle_weight(c, cfg.digraph, cfg.edge_success_prob) *
            var for c, var in zip(cycles, cycle_vars)) +
                    quicksum(e.weight * cfg.edge_success_prob * e.edge_var
                             for ndd in cfg.ndds for e in ndd.edges) +
                    quicksum(
                        e.weight * cfg.edge_success_prob**(pos + 1) * var
                        for e in cfg.digraph.es
                        for var, pos in zip(e.grb_vars, e.grb_var_positions)))
    m.setObjective(obj_expr, GRB.MAXIMIZE)

    m.update()

    # attach the necessary objects to the optconfig
    cfg.m = m
    cfg.cycles = cycles
    cfg.cycle_vars = cycle_vars
    cfg.cycle_list = cycle_list
# 示例#5
# 0
def feasibility_subproblem(
    z_vec_list,
    valid_responses,
    K,
    items,
    B_mat,
    b_vec,
    time_lim=TIME_LIM,
    problem_type="maximin",
    gamma_inconsistencies=0.0,
):
    """Solve the scenario-decomposition subproblem.

    Finds a response scenario s in {-1, +1}^K (one response per answered
    query, encoded by the binaries s_plus/s_minus with s_k = s_plus - s_minus)
    that is adversarial for the master problem: theta is minimized for
    maximin and maximized for mmr.

    Args:
        z_vec_list: list of K query difference vectors (len num_features each).
        valid_responses: must be exactly [-1, 1] (indifference unsupported).
        K: number of queries / responses.
        items: list of Item objects; item.features has length num_features.
        B_mat, b_vec: polyhedral definition of U^0 (B_mat u >= b_vec),
            B_mat is (m_const x num_features), b_vec is (m_const,).
        time_lim: Gurobi time limit.
        problem_type: 'maximin' or 'mmr'.
        gamma_inconsistencies: budget for agent inconsistencies (xi vars).

    Returns:
        (s_opt, objval): the optimal response scenario as a list of K ints in
        {-1, +1}, and the subproblem objective value.
    """

    # indifference response is not supported
    assert set(valid_responses) == set([-1, 1])

    assert problem_type in ["maximin", "mmr"]

    num_items = len(items)
    num_features = len(items[0].features)

    # recall: B_mat (m_const x n), and b_vec (m_const x 1)
    m_const = len(b_vec)
    assert B_mat.shape == (m_const, num_features)

    m = create_mip_model(time_lim=time_lim)
    m.params.OptimalityTol = 1e-8

    # xi variables relax the response constraints; their sum is bounded by gamma.
    # with gamma == 0 a zero ndarray stands in, so xi_vars[i_k] is literally 0.
    if gamma_inconsistencies > 0:
        xi_vars = m.addVars(K, lb=0.0, ub=GRB.INFINITY)
        m.addConstr(quicksum(xi_vars) <= gamma_inconsistencies)
    else:
        xi_vars = np.zeros(K)

    # objective value
    theta_var = m.addVar(
        vtype=GRB.CONTINUOUS, lb=-GRB.INFINITY, ub=GRB.INFINITY, name="theta"
    )

    # decision variables for response scenario
    # s_k = s_plus - s_minus, and either s_plus or s_minus == 1
    s_plus_vars = m.addVars(K, vtype=GRB.BINARY, name="s_plus")
    s_minus_vars = m.addVars(K, vtype=GRB.BINARY, name="s_minus")

    # only one response is possible
    for k in range(K):
        m.addConstr(s_plus_vars[k] + s_minus_vars[k] == 1, name="s_const")
        m.addSOS(GRB.SOS_TYPE1, [s_plus_vars[k], s_minus_vars[k]])

    # add constraints for the utility of each item x
    # u_vars for each item: u_vars[i, j] is feature j of item i's worst-case u-vector
    u_vars = m.addVars(
        num_items,
        num_features,
        vtype=GRB.CONTINUOUS,
        lb=-GRB.INFINITY,
        ub=GRB.INFINITY,
        name="u",
    )

    # v_vars_list[i] is the list of variables to select the MMR item in response to item i
    v_var_list = [None for _ in range(num_items)]
    nu_vars_list = [None for _ in range(num_items)]

    for i_item, item in enumerate(items):

        if problem_type == "mmr":

            # for mmr only: use binary variables to select the item that maximizes regret
            # v_vars[i, j] = 1 if item j is selected to maximize regret for item i
            # for each i, y_vars[i, j] can be >0 for only one j (sos1)
            v_vars = m.addVars(num_items, vtype=GRB.BINARY)
            m.addConstr(quicksum(v_vars) == 1.0)
            m.addSOS(GRB.SOS_TYPE1, [v_vars[i] for i in range(num_items)])

            v_var_list[i_item] = v_vars

            nu_vars = m.addVars(
                num_items,
                num_features,
                vtype=GRB.CONTINUOUS,
                lb=-GRB.INFINITY,
                ub=GRB.INFINITY,
            )
            nu_vars_list[i_item] = nu_vars

            # linearize the term nu_ij = v_i * u_j  (big-M product linearization;
            # M is assumed to bound |u| -- defined at module level)
            for i in range(num_items):
                for j in range(num_features):
                    m.addConstr(nu_vars[i, j] <= M * v_vars[i])
                    m.addConstr(nu_vars[i, j] >= -M * v_vars[i])
                    m.addConstr(
                        nu_vars[i, j] <= u_vars[i_item, j] + M * (1.0 - v_vars[i])
                    )
                    m.addConstr(
                        nu_vars[i, j] >= u_vars[i_item, j] - M * (1.0 - v_vars[i])
                    )

        # U^0 constraints for each u^x: B_mat u >= b_vec
        for i_row in range(m_const):
            m.addConstr(
                quicksum(
                    B_mat[i_row, i_feat] * u_vars[i_item, i_feat]
                    for i_feat in range(num_features)
                )
                >= b_vec[i_row],
                name=("U0_const_row_r%d_i%d" % (i_row, i_item)),
            )

        if problem_type == "maximin":
            # theta is an upper bound on each item's utility; minimized below
            m.addConstr(
                theta_var
                >= quicksum(
                    [
                        u_vars[i_item, i_feat] * item.features[i_feat]
                        for i_feat in range(num_features)
                    ]
                ),
                name=("theta_constr_i%d" % i_item),
            )
        if problem_type == "mmr":
            # theta <= (utility of the regret-maximizing item) - (utility of this item)
            rhs_1 = quicksum(
                [
                    quicksum(
                        [nu_vars[i, j] * items[i].features[j] for i in range(num_items)]
                    )
                    for j in range(num_features)
                ]
            )
            rhs_2 = quicksum(
                [
                    u_vars[i_item, i_feat] * item.features[i_feat]
                    for i_feat in range(num_features)
                ]
            )
            m.addConstr(theta_var <= rhs_1 - rhs_2, name=("theta_constr_i%d" % i_item))

        # add constraints on U(z, s): each u-vector must be consistent with the
        # chosen response to each query (big-M deactivates the inactive side)
        for i_k, z_vec in enumerate(z_vec_list):
            m.addConstr(
                quicksum(
                    [
                        u_vars[i_item, i_feat] * z_vec[i_feat]
                        for i_feat in range(num_features)
                    ]
                )
                + xi_vars[i_k]
                >= -M * (1 - s_plus_vars[i_k]),
                name=("U_s_plus_k%d" % i_k),
            )
            m.addConstr(
                quicksum(
                    [
                        u_vars[i_item, i_feat] * z_vec[i_feat]
                        for i_feat in range(num_features)
                    ]
                )
                - xi_vars[i_k]
                <= M * (1 - s_minus_vars[i_k]),
                name=("U_s_minus_k%d" % i_k),
            )

    # adversarial direction: minimize theta for maximin, maximize for mmr
    if problem_type == "maximin":
        m.setObjective(theta_var, sense=GRB.MINIMIZE)
    if problem_type == "mmr":
        m.setObjective(theta_var, sense=GRB.MAXIMIZE)

    m.update()

    # set dualreductions = 0 to distinguish between infeasible/unbounded
    # m.params.DualReductions = 0
    optimize(m)

    try:
        # get the optimal response scenario (recover s_k in {-1, +1})
        s_opt = [
            int(round(s_plus_vars[i_k].x - s_minus_vars[i_k].x)) for i_k in range(K)
        ]
        objval = m.objval

    except Exception as e:
        # variable values unavailable (no solution) -- surface the error
        print(e)
        raise

    return s_opt, objval
def solve_recommendation_problem(
    answered_queries,
    items,
    problem_type,
    gamma=0,
    verbose=False,
    fixed_rec_item=None,
    u0_type="box",
    logger=None,
):
    """solve the robust recommendation problem, and return the recommended item and worst-case utility vector

    Args:
        answered_queries: list of Query objects, each with .z (difference
            vector) and .response in {-1, 1}.
        items: list of Item objects.
        problem_type: 'maximin' or 'mmr'.
        gamma: (float >= 0) agent-inconsistency budget.
        verbose: passed through to the Gurobi model.
        fixed_rec_item: optional Item; if given, the recommendation is fixed
            and no selection (y) variables are created.
        u0_type: passed to get_u0 to build the initial uncertainty set U^0.
        logger: optional logger; its first handler's file receives Gurobi logs.

    Returns:
        (objVal, recommended_item): objective value and the recommended Item
        (fixed_rec_item itself, when one was supplied).

    Raises:
        Exception: if the model is infeasible or unbounded (LP/ILP files are
            written to $HOME for debugging).
    """

    valid_responses = [-1, 1]

    assert set([q.response
                for q in answered_queries]).issubset(set(valid_responses))
    assert problem_type in ["maximin", "mmr"]
    assert gamma >= 0

    # some constants
    K = len(answered_queries)
    num_features = len(items[0].features)
    z_vectors = [q.z for q in answered_queries]
    responses = [q.response for q in answered_queries]

    # polyhedral definition for U^0, b_mat and b_vec
    b_mat, b_vec = get_u0(u0_type, num_features)

    # define beta vars (more dual variables)
    m_const = len(b_vec)

    if logger is not None:
        log_file = logger.handlers[0].baseFilename
        logger.debug("writing gurobi logs for recommendation problem")
    else:
        log_file = None

    # set up the Gurobi model
    m = create_mip_model(verbose=verbose, log_file=log_file)

    # if the recommended item is fixed, don't create y vars
    if fixed_rec_item is not None:
        assert isinstance(fixed_rec_item, Item)
        y_vars = None
    else:
        # y vars : to select x^r, the recommended item in scenario r
        # exactly one item is selected (SOS1 + sum-to-one)
        y_vars = m.addVars(len(items), vtype=GRB.BINARY, name="y")
        m.addSOS(GRB.SOS_TYPE1, [y_vars[i] for i in range(len(items))])
        m.addConstr(quicksum(y_vars[i] for i in range(len(items))) == 1,
                    name="y_constr")
        fixed_rec_item = None

    # add dual variables
    if problem_type == "maximin":
        # single dual block; objective is b'beta + gamma*mu (maximized below)
        mu_var, alpha_vars, beta_vars = add_rec_dual_variables(
            m,
            K,
            gamma,
            problem_type,
            m_const,
            y_vars,
            num_features,
            items,
            b_mat,
            responses,
            z_vectors,
            fixed_rec_item,
        )
    if problem_type == "mmr":
        # one dual block per item; theta bounds the worst (largest) regret
        theta_var = m.addVar(vtype=GRB.CONTINUOUS,
                             lb=-GRB.INFINITY,
                             ub=GRB.INFINITY,
                             name="theta")
        beta_vars = {}
        alpha_vars = {}
        mu_vars = {}
        for item in items:
            (
                mu_vars[item.id],
                alpha_vars[item.id],
                beta_vars[item.id],
            ) = add_rec_dual_variables(
                m,
                K,
                gamma,
                problem_type,
                m_const,
                y_vars,
                num_features,
                items,
                b_mat,
                responses,
                z_vectors,
                fixed_rec_item,
                mmr_item=item,
            )
            m.addConstr(theta_var >= quicksum(
                [b_vec[j] * beta_vars[item.id][j]
                 for j in range(m_const)]) + gamma * mu_vars[item.id])

    if problem_type == "maximin":
        obj = (quicksum([b_vec[j] * beta_vars[j]
                         for j in range(m_const)]) + gamma * mu_var)
        m.setObjective(obj, sense=GRB.MAXIMIZE)
    elif problem_type == "mmr":
        m.setObjective(theta_var, sense=GRB.MINIMIZE)

    # DualReductions=0 lets Gurobi distinguish INFEASIBLE from UNBOUNDED
    m.Params.DualReductions = 0
    optimize(m)

    # --- gather results ---

    # if the model is unbounded (uncertainty set it empty), return None
    if m.status == GRB.INF_OR_UNBD:
        lp_file = os.path.join(os.getenv("HOME"),
                               "recommendation_problem_infeas_unbd.lp")
        ilp_file = os.path.join(os.getenv("HOME"),
                                "recommendation_problem_infeas_unbd.ilp")
        print(
            f"badly-behaved model. writing lp to: {lp_file}, writing ilp to: {ilp_file}"
        )
        m.computeIIS()
        m.write(lp_file)
        m.write(ilp_file)
        raise Exception("model infeasible or unbounded")
    if m.status == GRB.UNBOUNDED:
        lp_file = os.path.join(os.getenv("HOME"),
                               "recommendation_problem_infeas_unbd.lp")
        print(f"badly-behaved model. writing lp to: {lp_file}")
        m.write(lp_file)
        raise Exception("model is unbounded")

    assert m.status == GRB.OPTIMAL

    if fixed_rec_item is not None:
        return m.objVal, fixed_rec_item
    else:
        # find the recommended item: the single y variable set to 1
        y_vals = np.array([var.x for var in y_vars.values()])
        selected_items = np.argwhere(y_vals > 0.5)

        # there can only be one recommended item
        assert len(selected_items) == 1
        recommended_item = items[selected_items[0][0]]

        # # finally, find the minimum u-vector
        return m.objVal, recommended_item