Example 1
def identify_infeasibility(fn, parameters, terms, **kwargs):
    # TODO: apply to general constraint networks
    output = fn(parameters, terms, **kwargs)
    if output:
        return output
    # Deletion filter: drop the non-cost terms one at a time; a term whose removal
    # leaves the subproblem infeasible is not needed to explain the conflict.
    active_indices = {i for i, term in enumerate(terms) if term[0] != MINIMIZE}
    for index in list(active_indices):
        constraints = [terms[i] for i in active_indices - {index}]
        output = fn(parameters, constraints, **kwargs)
        if not output:  # still infeasible without this term, so discard it
            active_indices.remove(index)
    # TODO: be careful about removing variables
    infeasible_facts = [terms[index] for index in sorted(active_indices)]
    print('Inconsistent:', infeasible_facts)
    return OptimizerOutput(infeasible=[infeasible_facts])
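A minimal sketch of how identify_infeasibility might be exercised. MINIMIZE and OptimizerOutput are stand-ins for the names the example assumes (the real definitions in the surrounding codebase may differ), and the stub solver simply declares a fact set infeasible whenever two hypothetical conflicting facts are both present.

MINIMIZE = 'minimize'  # assumed sentinel, matching its use above

class OptimizerOutput(object):  # stand-in: falsy when no assignments were produced
    def __init__(self, assignments=[], infeasible=[]):
        self.assignments = list(assignments)
        self.infeasible = list(infeasible)
    def __bool__(self):
        return bool(self.assignments)
    def __repr__(self):
        return 'OptimizerOutput(assignments={}, infeasible={})'.format(
            self.assignments, self.infeasible)

def stub_solver(parameters, terms, **kwargs):
    # Pretend the problem is infeasible exactly when both conflicting bounds appear.
    facts = set(terms)
    if ('ge', 'x', 2) in facts and ('le', 'x', 1) in facts:
        return OptimizerOutput()  # no solution
    return OptimizerOutput(assignments=[(0,)])  # otherwise pretend it solved

terms = [('ge', 'x', 2), ('le', 'x', 1), ('ge', 'x', 0), (MINIMIZE, ('cost', 'x'))]
print(identify_infeasibility(stub_solver, ['x'], terms))
# Expected to report the minimal conflicting pair ('ge', 'x', 2) and ('le', 'x', 1).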
Example 2
    def fn(outputs, facts, hint={}):
        print(outputs, facts)

        model = Model(name='TAMP')
        model.setParam(GRB.Param.OutputFlag, verbose)
        if max_time < INF:
            model.setParam(GRB.Param.TimeLimit, max_time)

        var_from_param = {}
        for fact in facts:
            prefix, args = fact[0], fact[1:]
            if prefix in ['wcash', 'pcash', 'mcash']:
                cash, = args
                if is_parameter(cash):
                    # TODO: scale by 100 for cents
                    var_from_param[cash] = model.addVar(
                        lb=0,
                        ub=GRB.INFINITY,
                        vtype=GRB.INTEGER if integer else GRB.CONTINUOUS)
                    if prefix == 'wcash':
                        model.addConstr(var_from_param[cash] >= min_take)
                        if max_take < INF:
                            model.addConstr(var_from_param[cash] <= max_take)
                    if (prefix == 'pcash') and (max_wallet < INF):
                        model.addConstr(var_from_param[cash] <= max_wallet)
                    if prefix == 'mcash':
                        # min_balance >= 0
                        pass

        get_var = lambda p: var_from_param[p] if is_parameter(p) else p  # var_from_param.get(p, p)

        objective_terms = []
        for index, fact in enumerate(facts):
            name = str(index)
            if fact[0] == MINIMIZE:
                fact = fact[1]
                func, args = fact[0], map(get_var, fact[1:])
                if func == 'withdrawcost':
                    cash, = args
                    objective_terms.append(cash)
            elif fact[0] == NOT:
                fact = fact[1]
                predicate, args = fact[0], map(get_var, fact[1:])  # negated facts add no constraint in this model
            else:
                prefix, args = fact[0], map(get_var, fact[1:])
                if prefix == 'ge':
                    cash1, cash2 = args
                    model.addConstr(cash1 >= cash2, name=name)
                elif prefix == 'withdraw':
                    wcash, pcash1, pcash2, mcash1, mcash2 = args
                    model.addConstr(pcash1 + wcash == pcash2, name=name)
                    model.addConstr(mcash1 - wcash == mcash2, name=name)
        model.setObjective(quicksum(objective_terms), sense=GRB.MINIMIZE)

        model.optimize()

        objective = 0
        if objective_terms:
            objective = INF if model.status == GRB.INFEASIBLE else model.objVal
        print('Objective: {:.3f} | Solutions: {} | Status: {}'.format(
            objective, model.solCount, model.status))

        # https://www.gurobi.com/documentation/9.0/refman/optimization_status_codes.html
        if not model.solCount:  # GRB.INFEASIBLE | GRB.INF_OR_UNBD | OPTIMAL | SUBOPTIMAL | UNBOUNDED
            return OptimizerOutput()
        assignment = tuple(get_var(out).x for out in outputs)
        return OptimizerOutput(assignments=[assignment])
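Stripped of the fact parsing, Example 2 reduces to a standard gurobipy pattern: add variables, post linear constraints, minimize a quicksum, and read back a solution. The toy below reproduces that pattern for a single made-up withdrawal; the numbers and variable names are illustrative, not from the original, and gurobipy is assumed to be installed.

from gurobipy import Model, GRB, quicksum

model = Model(name='withdraw-sketch')
model.setParam(GRB.Param.OutputFlag, 0)  # silence solver logging

wcash = model.addVar(lb=0, ub=GRB.INFINITY, vtype=GRB.INTEGER, name='wcash')
pcash2 = model.addVar(lb=0, ub=GRB.INFINITY, vtype=GRB.INTEGER, name='pcash2')
mcash2 = model.addVar(lb=0, ub=GRB.INFINITY, vtype=GRB.INTEGER, name='mcash2')
pcash1, mcash1, price = 10, 200, 50  # made-up initial pocket cash, machine cash, target

model.addConstr(pcash1 + wcash == pcash2)  # 'withdraw': cash moves into the pocket...
model.addConstr(mcash1 - wcash == mcash2)  # ...and out of the machine
model.addConstr(pcash2 >= price)           # 'ge': end with enough to pay
model.setObjective(quicksum([wcash]), sense=GRB.MINIMIZE)  # 'withdrawcost'

model.optimize()
if model.SolCount:
    print(wcash.X, pcash2.X, mcash2.X)  # expected: 40.0 50.0 160.0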
Example 3
    def fn(outputs, facts, hint={}):
        # TODO: pass in the variables and constraint streams instead?
        # The true test is placing two blocks in a tight region obstructed by one
        positive, negative, costs = partition_facts(facts)
        #print('Parameters:', outputs)
        print('Constraints:', positive + negative)
        if costs:
            print('Costs:', costs)
        model = Model(name='TAMP')
        model.setParam(GRB.Param.OutputFlag, verbose)
        model.setParam(GRB.Param.TimeLimit, max_time)

        var_from_param = {}
        for fact in facts:
            prefix, args = fact[0], fact[1:]
            if prefix == 'conf':
                param, = args
                if is_parameter(param):
                    var_from_param[param] = np_var(model)
            elif prefix == 'pose':
                _, param = args
                if is_parameter(param):
                    var_from_param[param] = np_var(model)
            elif prefix == 'traj':
                raise NotImplementedError()
                #param, = args
                #if param not in var_from_id:
                #    var_from_id[id(param)] = [np_var(model), np_var(model)]

        def get_var(p):
            return var_from_param[p] if is_parameter(p) else p

        objective_terms = []
        constraint_from_name = {}
        for index, fact in enumerate(facts):
            prefix, args = fact[0], fact[1:]
            name = str(index)
            if prefix == 'kin':
                kinematics_constraint(model, name, *map(get_var, args))
            elif prefix == 'contained':
                contained_constraint(model, regions, name, *map(get_var, args))
            elif prefix == 'cfree' and collisions:
                collision_constraint(model, name, *map(get_var, args))
            elif prefix == 'motion':
                #motion_constraint(model, name, *map(get_var, args))
                raise NotImplementedError()
            elif prefix == NOT:
                fact = args[0]
                predicate, args = fact[0], fact[1:]
                if predicate == 'posecollision' and collisions:
                    collision_constraint(model, name, *map(get_var, args))
            elif prefix == MINIMIZE:
                fact = args[0]
                func, args = fact[0], fact[1:]
                if func == 'distance':
                    objective_terms.extend(distance_cost(*map(get_var, args)))
                continue
            constraint_from_name[name] = fact

        for out, value in hint.items():
            for var, coord in zip(get_var(out), value):
                var.start = coord

        model.setObjective(quicksum(objective_terms), sense=GRB.MINIMIZE)
        #m.write("file.lp")
        model.optimize()
        # https://www.gurobi.com/documentation/7.5/refman/optimization_status_codes.html
        if model.status in (GRB.INFEASIBLE, GRB.INF_OR_UNBD):  # OPTIMAL | SUBOPTIMAL
            if not diagnose:
                return OptimizerOutput()
            constraint_indices = {i for i, term in enumerate(facts)
                                  if term[0] != MINIMIZE}
            #infeasible = constraint_indices
            #infeasible = compute_inconsistent(model)
            #infeasible = deletion_filter(model, constraint_indices)
            infeasible = elastic_filter(model, constraint_indices)
            infeasible_facts = [facts[index] for index in sorted(infeasible)]
            print('Inconsistent:', infeasible_facts)
            return OptimizerOutput(infeasible=[infeasible])
        assignment = tuple(value_from_var(get_var(out)) for out in outputs)
        return OptimizerOutput(assignments=[assignment])
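The commented-out compute_inconsistent alternative above presumably leans on Gurobi's native IIS facility; the sketch below shows that facility in isolation on a deliberately conflicting toy model. The helper's real implementation is an assumption here, and only documented gurobipy calls are used.

from gurobipy import Model, GRB

model = Model(name='iis-sketch')
model.setParam(GRB.Param.OutputFlag, 0)
x = model.addVar(lb=-GRB.INFINITY, ub=GRB.INFINITY, name='x')
model.addConstr(x >= 2, name='lower')   # these two constraints conflict
model.addConstr(x <= 1, name='upper')
model.addConstr(x <= 10, name='loose')  # irrelevant to the conflict

model.optimize()
if model.Status in (GRB.INFEASIBLE, GRB.INF_OR_UNBD):
    model.computeIIS()  # irreducible inconsistent subsystem
    conflict = [c.ConstrName for c in model.getConstrs() if c.IISConstr]
    print('Inconsistent:', conflict)  # expected: ['lower', 'upper']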
Example 4
    def fn(outputs, facts, hint={}):
        # TODO: pass in the variables and constraint streams instead?
        # The true test is placing two blocks in a tight region obstructed by one
        constraint_indices = {i for i, term in enumerate(facts)
                              if term[0] != MINIMIZE}
        positive, negative, costs = partition_facts(facts)
        #print('Parameters:', outputs)
        #print('Constraints:', positive + negative)
        if costs:
            print('Costs:', costs)

        # https://github.com/yijiangh/coop_assembly/blob/e52abef7c1cfb1d3e32691d163abc85dd77f27a2/src/coop_assembly/geometry_generation/caelan.py
        model = Model(name='TAMP')
        model.setParam(GRB.Param.OutputFlag, verbose)
        model.setParam(GRB.Param.TimeLimit, max_time)
        model.setParam(GRB.Param.Cutoff, GRB.INFINITY)  # TODO: account for scaling
        #if num_solutions < INF:
        #    model.setParam(GRB.Param.SolutionLimit, num_solutions)

        # Limit how many solutions to collect
        #model.setParam(GRB.Param.PoolSolutions, 2)
        # Limit the search space by setting a gap for the worst possible solution that will be accepted
        #model.setParam(GRB.Param.PoolGap, 0.10) # PoolGapAbs
        # do a systematic search for the k-best solutions
        #model.setParam(GRB.Param.PoolSearchMode, 2) # 0 | 1 | 2
        # https://www.gurobi.com/documentation/9.1/examples/poolsearch_py.html#subsubsection:poolsearch.py

        ##########

        # TODO: remove anything that's just a domain condition?
        variable_indices = {}
        var_from_param = {}
        for index, fact in enumerate(facts):
            prefix, args = fact[0], fact[1:]
            if prefix == 'conf':
                param, = args
                if is_parameter(param):
                    var_from_param[param] = np_var(model,
                                                   lower=lower,
                                                   upper=upper)
            elif prefix == 'pose':
                _, param = args
                if is_parameter(param):
                    var_from_param[param] = np_var(model,
                                                   lower=lower,
                                                   upper=upper)
            elif prefix == 'grasp':  # TODO: iterate over combinations
                _, param = args
                if is_parameter(param):
                    var_from_param[param] = GRASP
            elif prefix == 'traj':
                raise NotImplementedError()
                #param, = args
                #if param not in var_from_id:
                #    var_from_id[id(param)] = [np_var(model), np_var(model)]
            else:
                continue
            variable_indices[index] = fact
        dimension = sum(len(var) for var in var_from_param.values())

        def get_var(p):
            return var_from_param[p] if is_parameter(p) else p

        ##########

        codimension = 0
        objective_terms = []  # TODO: could make a variable to impose a cost constraint
        constraint_from_name = {}
        for index, fact in enumerate(facts):
            prefix, args = fact[0], fact[1:]
            name = str(index)
            if prefix == 'kin':
                kinematics_constraint(model, name, *map(get_var, args))
                codimension += 2
            elif prefix in ('contain', 'contained'):
                contained_constraint(model, regions, name, *map(get_var, args))
                codimension += 1
            elif prefix == 'cfree' and collisions:
                # TODO: drop collision constraints until violated
                collision_constraint(model, name, *map(get_var, args))
            elif prefix == 'motion':
                #motion_constraint(model, name, *map(get_var, args))
                raise NotImplementedError()
            elif prefix == NOT:
                fact = args[0]
                predicate, args = fact[0], fact[1:]
                if predicate == 'posecollision' and collisions:
                    collision_constraint(model, name, *map(get_var, args))
            elif prefix == MINIMIZE:
                fact = args[0]
                func, args = fact[0], fact[1:]
                if func in ('dist', 'distance'):
                    objective_terms.extend(
                        distance_cost(model, *map(get_var, args)))
                continue
            constraint_from_name[name] = fact
        model.update()

        ##########

        #linear_model = model
        linear_model = copy_model(model)
        #linear_model = Model(name='Linear TAMP')

        # TODO: prune linearly dependent constraints
        linear_constraints = {c for c in linear_model.getConstrs()
                              if c.sense == GRB.EQUAL}
        codimension = len(linear_constraints)
        # TODO: account for v.LB == v.UB
        #linear_variables = {v for v in linear_model.getVars() if v.VType == GRB.CONTINUOUS}
        #print(vars_from_expr(linear_model.getObjective()))
        linear_variables = set()
        for c in linear_constraints:
            linear_variables.update(vars_from_expr(linear_model.getRow(c)))
        linear_variables = sorted(linear_variables, key=lambda v: v.VarName)
        dimension = len(linear_variables)

        print('{} variables (dim={}): {}'.format(
            len(variable_indices), dimension,
            [facts[index] for index in sorted(variable_indices)]))
        nontrivial_indices = set(constraint_indices) - set(variable_indices)  # TODO: rename
        print('{} constraints: (codim={}): {}'.format(
            len(nontrivial_indices), codimension,
            [facts[index] for index in sorted(nontrivial_indices)]))

        # # https://en.wikipedia.org/wiki/Linear_subspace
        # # TODO: Equations for a subspace
        # #for c in model.getConstrs():
        # #    if c.sense != GRB.EQUAL:
        # #        model.remove(c)
        # variables = [model.getVarByName(v.VarName) for v in linear_variables]
        # lower_bound = np.array([v.LB for v in variables])
        # upper_bound = np.array([v.UB for v in variables])
        # center = (lower_bound + upper_bound) / 2.
        # extent = (upper_bound - lower_bound) / 2.
        # radius = np.linalg.norm(extent) # sphere
        #
        # point = radius*sample_sphere(dimension) + center
        # #point = center
        # basis = [sample_sphere_surface(dimension) for _ in range(codimension)]
        # #basis = [np.ones(dimension)]
        # multipliers = [unbounded_var(model) for _ in basis]
        # subspace_constraints = []
        # for i in range(dimension):
        #     combination = sum([m*b[i] for m, b in zip(multipliers, basis)])
        #     subspace_constraints.append(model.addConstr(variables[i] - point[i] == combination))
        # #for c in subspace_constraints:
        # #    model.remove(c)

        # TODO: generator version
        # for v in set(linear_model.getVars()) - linear_variables:
        #     linear_model.remove(v)
        # for c in set(linear_model.getConstrs()) - linear_constraints:
        #     linear_model.remove(c)
        # linear_model.setObjective(quicksum(sample_targets(linear_model, linear_variables)), sense=GRB.MINIMIZE)
        # linear_model.optimize()
        # for v in linear_variables: # Projection method
        #     set_value(model.getVarByName(v.VarName), v.X)

        ##########

        # TODO: normalize cost relative to the best cost for a trade-off
        # TODO: increasing bound on deterioration in quality
        weight = 0
        if weight > 0:
            primary_variables = {v for var in var_from_param.values() for v in var}
            objective_terms.extend(
                weight * term
                for term in sample_targets(model, primary_variables))
        model.setObjective(
            quicksum(objective_terms),
            sense=GRB.MINIMIZE)  # (1-weight) * quicksum(objective_terms)

        for out, value in hint.items():
            for var, coord in zip(get_var(out), value):
                # https://www.gurobi.com/documentation/9.1/refman/varhintval.html#attr:VarHintVal
                set_guess(var, coord, hard=hard)
                #set_value(var, coord)

        ##########

        #m.write("file.lp")
        model.optimize()
        # https://www.gurobi.com/documentation/7.5/refman/optimization_status_codes.html
        #if model.status in (GRB.INFEASIBLE, GRB.INF_OR_UNBD, GRB.CUTOFF): # OPTIMAL | SUBOPTIMAL
        if model.SolCount == 0:
            if diagnostic is None:
                return OptimizerOutput()
            elif diagnostic == 'all':
                #infeasible = constraint_indices
                infeasible = nontrivial_indices
            elif diagnostic == 'deletion':
                infeasible = deletion_filter(model, constraint_indices)
            elif diagnostic == 'elastic':
                infeasible = elastic_filter(model, constraint_indices)
            elif diagnostic == 'gurobi':
                infeasible = compute_inconsistent(model)
            else:
                raise NotImplementedError(diagnostic)
            print('Inconsistent:',
                  [facts[index] for index in sorted(infeasible)])
            return OptimizerOutput(infeasible=[infeasible])

            #expr.getValue() # TODO: store expressions and evaluate value
        # for c in model.getConstrs():
        #     print(c, c.Slack, c.RHS)
        #     print(c.__dict__)
        #     print(dir(c))

        ##########

        print(
            'Solved: {} | Objective: {:.3f} | Solutions: {} | Status: {} | Runtime: {:.3f}'
            .format(True, model.ObjVal, model.SolCount, model.status,
                    model.runtime))

        if costs and diagnose_cost:
            infeasible = deletion_filter(model,
                                         constraint_indices,
                                         max_objective=model.ObjVal - 1e-6)
        else:
            # TODO: propagate automatically to optimizer
            #infeasible = constraint_indices
            infeasible = nontrivial_indices
        print('Cost inconsistent:',
              [facts[index] for index in sorted(infeasible)])

        # variables = list(var_from_param.values())
        # for index, solution in enumerate(sample_solutions(model, variables, num_samples=15)):
        #    print(index, solution)

        assignment = tuple(value_from_var(get_var(out)) for out in outputs)
        return OptimizerOutput(assignments=[assignment],
                               infeasible=[infeasible])
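For reference, the 'elastic' diagnostic above is presumably a variant of elastic filtering: relax every constraint with a nonnegative slack variable, minimize the total slack, and flag the constraints whose slack ends up positive. The sketch below is illustrative only (a tiny made-up model, not the elastic_filter helper from the example) and uses only documented gurobipy calls.

from gurobipy import Model, GRB, quicksum

model = Model(name='elastic-sketch')
model.setParam(GRB.Param.OutputFlag, 0)
x = model.addVar(lb=0, ub=10, name='x')

# Conflicting requirements: x >= 8 and x <= 3; each gets an elastic slack.
requirements = [('lower', lambda s: x + s >= 8),   # slack relaxes the lower bound
                ('upper', lambda s: x - s <= 3)]   # slack relaxes the upper bound
slacks = {}
for name, relaxed in requirements:
    s = model.addVar(lb=0, name='slack_' + name)
    model.addConstr(relaxed(s), name=name)
    slacks[name] = s

model.setObjective(quicksum(list(slacks.values())), sense=GRB.MINIMIZE)
model.optimize()
implicated = [name for name, s in slacks.items() if s.X > 1e-6]
print('Elastic filter implicates:', implicated)  # a nonempty subset of ['lower', 'upper']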