Example #1
    def _get_probabilities_by_solving_eq_system(self, succ_dist_without_target_states, vars):

        self.opt_solver.push()
        #TODO is this correct?
        lhs_sum = Sum([
                    self.oracle.get_oracle_value(succ_id_2) for
                    (succ_id_2, _) in succ_dist_without_target_states
                ])
        rhs_sum = Sum([
                    vars[succ_id_2] for
                    (succ_id_2, _) in succ_dist_without_target_states
                ])

        def _multiply(left,right):
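            # Build the product left * right through Z3's C API; with two
            # arguments this is equivalent to the overloaded expression left * right.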
            args = (Ast * 2)()
            args[0] = left.as_ast()
            args[1] = right.as_ast()
            return ArithRef(Z3_mk_mul(left.ctx.ref(), 2, args), left.ctx)


        for (succ_id, prob) in succ_dist_without_target_states:
            # keep vars proportional to the oracle values:
            # vars[succ_id] / sum(vars) == oracle(succ_id) / sum(oracle),
            # written as a cross-product to avoid division
            lhs = _multiply(vars[succ_id], lhs_sum)
            rhs = _multiply(self.oracle.get_oracle_value(succ_id), rhs_sum)
            equality = lhs == rhs
            self.opt_solver.add(equality)

        if self.opt_solver.check() == sat:
            return True

        else:
            self.opt_solver.pop()
            return False
Example #2
    def solve_with(self, input_image, output_index):
        x = RealVector('x', len(input_image[0]))
        for i in range(len(input_image[0])):
            if i >= self.pixels_to_change:
                # x[i] = image[i]
                self.solver.add(x[i] == input_image[0][i].item())
            else:
                # 0 <= x[i] <= 1
                self.solver.add(x[i] >= 0)
                self.solver.add(x[i] <= 1)

        fc1_weights = fetch_weights(1)
        fc1_shape = fc1_weights.size()
        # o1 = fc1^T * x
        o1 = [
            Sum([fc1_weights[i, j].item() * x[j] for j in range(fc1_shape[1])])
            for i in range(fc1_shape[0])
        ]

        # y1 = ReLU(o1)
        y1 = [If(o1[i] > 0, o1[i], 0) for i in range(fc1_shape[0])]

        fc2_weights = fetch_weights(2)
        fc2_shape = fc2_weights.size()
        # y2 = fc2^T * y1
        y2 = [
            Sum([
                fc2_weights[i, j].item() * y1[j] for j in range(fc2_shape[1])
            ]) for i in range(fc2_shape[0])
        ]

        # If any other y2 output is higher than the one at output_index,
        # the predicted class changes
        self.solver.add(
            Or([
                y2[output_index] < y2[i] for i in range(len(y2))
                if i != output_index
            ]))
        # self.solver.add(And([y2[output_index] > y2[i]
        #                      for i in range(len(y2)) if i != output_index]))

        # Check if the classification can change
        check = self.solver.check()
        sat = str(check) == 'sat'

        if sat:
            m = self.solver.model()
            # Substitute the model back in
            x_new = input_image.clone().detach()
            for i in range(self.pixels_to_change):
                x_new[0][i] = model_to_val(m, x[i])
            for i in range(self.pixels_to_change, len(input_image[0])):
                x_new[0][i] = input_image[0][i]
            return x_new
        elif str(check) == 'unknown':
            return 'timeout'
        else:
            return 'unsat'
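
Note: the model_to_val helper used above is not part of this snippet. A minimal sketch of what it presumably does, reading a Real variable back from the Z3 model as a Python float (an assumption, not the original implementation):

def model_to_val(m, var):
    # hypothetical helper: evaluate var in the model and convert the
    # resulting rational value to a Python float
    val = m.eval(var, model_completion=True)
    return float(val.as_fraction())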
Example #3
def main():
    """Star Battle solver example."""
    sym = grilops.SymbolSet([("EMPTY", " "), ("STAR", "*")])
    lattice = grilops.get_rectangle_lattice(HEIGHT, WIDTH)
    sg = grilops.SymbolGrid(lattice, sym)

    # There must be exactly two stars per row.
    for y in range(HEIGHT):
        sg.solver.add(
            Sum(*[
                If(sg.cell_is(Point(y, x), sym.STAR), 1, 0)
                for x in range(WIDTH)
            ]) == 2)

    # There must be exactly two stars per column.
    for x in range(WIDTH):
        sg.solver.add(
            Sum(*[
                If(sg.cell_is(Point(y, x), sym.STAR), 1, 0)
                for y in range(HEIGHT)
            ]) == 2)

    # There must be exactly two stars per area.
    area_cells = defaultdict(list)
    for y in range(HEIGHT):
        for x in range(WIDTH):
            area_cells[AREAS[y][x]].append(sg.grid[Point(y, x)])
    for cells in area_cells.values():
        sg.solver.add(Sum(*[If(c == sym.STAR, 1, 0) for c in cells]) == 2)

    # Stars may not touch each other, not even diagonally.
    for y in range(HEIGHT):
        for x in range(WIDTH):
            p = Point(y, x)
            sg.solver.add(
                Implies(
                    sg.cell_is(p, sym.STAR),
                    And(*[
                        n.symbol == sym.EMPTY
                        for n in sg.vertex_sharing_neighbors(p)
                    ])))

    if sg.solve():
        sg.print()
        if sg.is_unique():
            print("Unique solution")
        else:
            print("Alternate solution")
            sg.print()
    else:
        print("No solution")
Example #4
    def generate_imperative(self,production, d, initial_production = None):
        """provide a tuple that evaluates, scores, and prints a production"""
        if d == 0: # can't do any more work
            return (lambda state,i: []),0,(lambda m: "")

        # Generate programs of depth 1 resolving to p
        p = initial_production if initial_production else production
        firstEvaluate, firstMDL, firstShow, firstConstraints = self.generate(p,1)

        # generate programs of depth d-1 resolving to production
        restEvaluate, restMDL, restShow = \
            self.generate_imperative(production, d-1)

        def evaluate(state,i):
            first_output, first_state = firstEvaluate((state,i))
            rest_output = restEvaluate(first_state,i)
            return [first_output]+rest_output

        mdl = self.values("r")

        def show(m):
            return firstShow(m) + "\n" + restShow(m)

        self.constraints += firstConstraints
        self.constraints.append(mdl == Sum(firstMDL, restMDL))

        return evaluate,mdl,show
Example #5
 def create_objective(self) -> bool:
     """create optimization objectives"""
     # in case of multiple objectives, combine them into a single weighted objective
     if self.is_multi_objective_optimization_problem:
         # Replace objectives O_i, O_j, O_k with
         # O = WiOi+WjOj+WkOk etc.
         equivalent_single_objective = Int("EquivalentSingleObjective")
         weighted_objectives = []
         for obj in self.problem_context.objectives:
             variable_to_optimize = obj.target
             weight = obj.weight
             if isinstance(obj, MaximizeObjective):
                 weighted_objectives.append(-weight * variable_to_optimize)
             else:
                 weighted_objectives.append(weight * variable_to_optimize)
         self.add_constraint(
             equivalent_single_objective == Sum(weighted_objectives))
         # create an indicator
         equivalent_indicator = Indicator("EquivalentIndicator",
                                          equivalent_single_objective)
         self.objective = MinimizeObjective("EquivalentObjective",
                                            equivalent_indicator)
         self.add_constraint(equivalent_indicator.get_assertions())
     else:
         self.objective = self.problem.context.objectives[0]
Example #6
def cost(repository: PackageGroup, from_state: EncodedState,
         to_state: EncodedState) -> BoolRef:
    transition = map(flatten, zip(from_state.items(), to_state.items()))
    return Sum([
        __cost(from_bool, to_bool, repository[from_package].size)
        for from_package, from_bool, _, to_bool in transition
    ])
Example #7
def total_cost(states: List[EncodedState],
               repository: PackageGroup) -> BoolRef:
    logging.info('cost constraint')
    costs = [
        cost(repository, from_state, to_state)
        for from_state, to_state in logging_tqdm(neighbours(states))
    ]
    return Sum(costs)
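
Note: both functions above rely on a flatten helper that is not shown. A minimal sketch consistent with the four-way unpacking in cost (an assumption, not the original implementation):

def flatten(pair_of_pairs):
    # ((pkg_a, bool_a), (pkg_b, bool_b)) -> (pkg_a, bool_a, pkg_b, bool_b)
    return tuple(item for pair in pair_of_pairs for item in pair)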
Example #8
    def cover(self, n):
        # If a vertex j is in the bag, it must be covered:
        # assert (=> arc_ij  (>= (+ weight_j_e2 weight_j_e5 weight_j_e7 ) 1) )
        # TODO: double-check the iterator over i
        logging.info('Vertex in bag -> covered')
        logging.debug("Edges %s" % self.hypergraph.edges())
        for i in range(1, n + 1):
            for j in range(1, n + 1):
                if i == j:
                    continue

                # TODO: add i>j
                logging.debug("i=%s, j=%s" % (i, j))
                logging.debug("edges: %s" % self.hypergraph.edges())

                # arc_ij then j must be covered by some edge (because j will end up in one bag)
                weights = []
                C = []
                for e in self.hypergraph.incident_edges(j):
                    logging.debug(" i=%s, j=%s, e=%s" % (i, j, e))
                    C.append(self.weight[i][e])
                    weights.append("weight_{i}_e{e}".format(i=i, e=e))

                C = [self.literal(x) for x in C]
                f = Implies(self.literal(self.arc[i][j]), (Sum(C) >= 1.0))
                logging.debug(" Assertation %s" % f)
                self.__solver.add(f)
                self.stream.write(
                    "(assert (=> arc_{i}_{j} (>= (+ {weights}) 1)))\n".format(
                        i=i, j=j, weights=" ".join(weights)))

                # arc_ij then i must be covered by some edge (because i will end up in one bag)
                weights = []
                C = []
                for e in self.hypergraph.incident_edges(i):
                    logging.debug(" i=%s, j=%s, e=%s" % (i, j, e))
                    C.append(self.weight[i][e])
                    weights.append("weight_{i}_e{e}".format(i=i, e=e))

                C = [self.literal(x) for x in C]
                f = (Sum(C) >= 1.0)
                logging.debug(" Assertation %s" % f)

                self.__solver.add(f)
                self.stream.write("(assert (>= (+ {weights}) 1))\n".format(
                    weights=" ".join(weights)))
Example #9
def _analyze_state(state):
    instruction = state.get_current_instruction()
    node = state.node

    if instruction["opcode"] != "CALL":
        return []

    call_value = state.mstate.stack[-3]
    target = state.mstate.stack[-2]

    eth_sent_total = BitVecVal(0, 256)

    constraints = copy(node.constraints)

    for tx in state.world_state.transaction_sequence:
        if tx.caller == 0xDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF:

            # There's sometimes no overflow check on balances added.
            # But we don't care about attacks that require more than 2^256 ETH to be sent.

            constraints += [
                BVAddNoOverflow(eth_sent_total, tx.call_value, False)
            ]
            eth_sent_total = Sum(eth_sent_total, tx.call_value)
    constraints += [
        UGT(call_value, eth_sent_total), target == state.environment.sender
    ]

    try:

        transaction_sequence = solver.get_transaction_sequence(
            state, constraints)

        debug = str(transaction_sequence)

        issue = Issue(
            contract=node.contract_name,
            function_name=node.function_name,
            address=instruction["address"],
            swc_id=UNPROTECTED_ETHER_WITHDRAWAL,
            title="Ether thief",
            _type="Warning",
            bytecode=state.environment.code.bytecode,
            description=
            "Arbitrary senders other than the contract creator can withdraw ETH from the contract"
            +
            " account without previously having sent an equivalent amount of ETH to it. This is likely to be"
            + " a vulnerability.",
            debug=debug,
            gas_used=(state.mstate.min_gas_used, state.mstate.max_gas_used),
        )
    except UnsatError:
        logging.debug("[ETHER_THIEF] no model found")
        return []

    return [issue]
Example #10
    def target_cells_use_all_available_pieces(self, board, pieces):
        constraints = []
        for (piece, quantity) in collections.Counter(pieces).items():
            constraints.append(
                Sum([
                    If(cell == piece, 1, 0) for (_, _, value, cell) in board
                    if self.is_cell_empty(value)
                ]) == quantity)

        return constraints
Example #11
    def add_indicator_number_tasks_assigned(self, resource: Resource):
        """compute the number of tasks as resource is assigned"""
        # this list contains a 1 for every busy interval whose task is
        # actually scheduled (start > -1), and a 0 otherwise
        scheduled_tasks = [
            If(start > -1, 1, 0) for start, end in resource.busy_intervals.values()
        ]

        nb_tasks_assigned_indicator_variable = Sum(scheduled_tasks)
        return Indicator(
            "Nb Tasks Assigned (%s)" % resource.name,
            nb_tasks_assigned_indicator_variable,
        )
Example #12
    def add_indicator_resource_utilization(self, resource: Resource) -> Indicator:
        """Compute the total utilization of a single resource.

        The percentage is rounded to an int value.
        """
        durations = [
            interv_up - interv_low
            for interv_low, interv_up in resource.busy_intervals.values()
        ]
        utilization = (Sum(durations) * 100) / self.horizon  # in percentage
        return Indicator(
            "Utilization (%s)" % resource.name, utilization, bounds=(0, 100)
        )
Example #13
 def add_objective_priorities(self, weight=1) -> Union[ArithRef, Indicator]:
     """optimize the solution such that all task with a higher
     priority value are scheduled before other tasks"""
     all_priorities = []
     for task in self.context.tasks:
         if task.optional:
             all_priorities.append(task.end * task.priority * task.scheduled)
         else:
             all_priorities.append(task.end * task.priority)
     priority_sum = Sum(all_priorities)
     priority_indicator = Indicator("PriorityTotal", priority_sum)
     MinimizeObjective("", priority_indicator, weight)
     return priority_indicator
Example #14
 def add_objective_flowtime(self, weight=1) -> Union[ArithRef, Indicator]:
     """the flowtime is the sum of all ends, minimize. Be carful that
     it is contradictory with makespan"""
     task_ends = []
     for task in self.context.tasks:
         if task.optional:
             task_ends.append(task.end * task.scheduled)
         else:
             task_ends.append(task.end)
     flow_time_expr = Sum(task_ends)
     flow_time = Indicator("FlowTime", flow_time_expr)
     MinimizeObjective("", flow_time, weight)
     return flow_time
Example #15
 def sum_expr(self, expr):
     newList = [i.convert(self) for i in expr.list]

     (hit, result) = self.checkCache("Sum", newList)
     if hit:
         return result
     try:
         ret = Sum(*newList)
     except Exception:
         ret = sum(newList)
     return self.cache(ret, result)
Example #16
def part_2(nanobots: List[Nanobot]) -> int:
    x, y, z = Ints("x y z")
    opt = Optimize()
    bot_cond = []
    for i, bot in enumerate(nanobots):
        cond = Int(f"bot_{i}")
        bot_cond.append(cond)
        opt.add(cond == If(z_manhattan(x, y, z, bot.point) <= bot.r, 1, 0))
    overlaps = Sum(bot_cond)
    dist_zero = Int('dist_zero')
    opt.add(dist_zero == z_manhattan(x, y, z, Point(0, 0, 0)))
    _ = opt.maximize(overlaps)
    dist = opt.maximize(dist_zero)
    opt.check()
    return dist.upper()
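
Note: z_manhattan is not included in this snippet. A minimal sketch of a symbolic Manhattan distance built from z3 If terms, assuming Point exposes x, y and z fields (an assumption, not the original helper):

def z_abs(v):
    # symbolic absolute value
    return If(v >= 0, v, -v)

def z_manhattan(x, y, z, p):
    # Manhattan distance between the symbolic point (x, y, z) and a concrete Point p
    return z_abs(x - p.x) + z_abs(y - p.y) + z_abs(z - p.z)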
Example #17
def killer_sudoku(cages: List[str], cage_sum_grid: List[List[int]]) -> str:
    """Solver for Killer Sudoku minipuzzles."""
    sym = grilops.make_number_range_symbol_set(1, SIZE)
    sg = grilops.SymbolGrid(LATTICE, sym)
    shifter = Shifter(sg.solver)

    # Add normal sudoku constraints.
    for y in range(SIZE):
        sg.solver.add(Distinct(*[sg.grid[Point(y, x)] for x in range(SIZE)]))
    for x in range(SIZE):
        sg.solver.add(Distinct(*[sg.grid[Point(y, x)] for y in range(SIZE)]))
    for z in range(9):
        top = (z // 3) * 3
        left = (z % 3) * 3
        cells = [
            sg.grid[Point(y, x)] for y in range(top, top + 3)
            for x in range(left, left + 3)
        ]
        sg.solver.add(Distinct(*cells))

    # Build a map from each cage label to the cells within that cage.
    cage_cells = defaultdict(list)
    for p in LATTICE.points:
        cage_cells[cages[p.y][p.x]].append(sg.grid[p])

    # The digits used in each cage must be unique.
    for cells_in_cage in cage_cells.values():
        sg.solver.add(Distinct(*cells_in_cage))

    cage_sums = {}
    for p in LATTICE.points:
        cage_sum = cage_sum_grid[p.y][p.x]
        if cage_sum > 0:
            shifted_cage_sum = shifter.given(p, cage_sum)
            cage_label = cages[p.y][p.x]
            assert cage_label not in cage_sums
            cage_sums[cage_label] = shifted_cage_sum

    # Add constraints for cages with given sums.
    for cage_label, shifted_cage_sum in cage_sums.items():
        sg.solver.add(Sum(*cage_cells[cage_label]) == shifted_cage_sum)

    assert sg.solve()
    sg.print()
    print()
    shifter.print_shifts()
    print()
    return shifter.eval_binary()
Example #18
    def objective_function(self):
        """
        Objective function is a weighted combination of gate errors and decoherence errors
        """
        self.fidelity_terms = [self.gate_fidelity[gate] for gate in self.gate_fidelity]
        self.coherence_terms = []
        for q in self.qubit_lifetime:
            val = -self.qubit_lifetime[q]/min(self.bp_t1_time[q], self.bp_t2_time[q])
            self.coherence_terms.append(val)

        all_terms = []
        for item in self.fidelity_terms:
            all_terms.append(self.weight_factor*item)
        for item in self.coherence_terms:
            all_terms.append((1-self.weight_factor)*item)
        self.opt.maximize(Sum(all_terms))
Example #19
    def fractional_counters(self, m=None):
        n = self.hypergraph.number_of_nodes()

        logging.info("Counter for fractional covers value=%s" % m)
        for j in range(1, n + 1):
            C0 = []
            weights = []
            for e in self.hypergraph.edges():
                assert (e > 0)
                C0.append(self.weight[j][e])
                weights.append("weight_{j}_e{e}".format(j=j, e=e))

            # set optimization variable or value for SAT check
            C = [self.literal(x) for x in C0]
            f = (Sum(C) <= m)
            logging.debug("Assertation %s" % f)
            self.__solver.add(f)

            self.stream.write("(assert ( <= (+ {weights}) {m}))\n".format(
                weights=" ".join(weights), m=m))
Example #20
  def __add_constraints(self):
    """Add constraints to the region modeling grids."""
    def constrain_side(p, sp, sd):
      self.__solver.add(Implies(
          self.__parent_grid[p] == X,
          self.__parent_grid[sp] != sd
      ))
      self.__solver.add(Implies(
          self.__parent_grid[sp] == sd,
          And(
              self.__region_id_grid[p] == self.__region_id_grid[sp],
              self.__region_size_grid[p] == self.__region_size_grid[sp],
          )
      ))

    def subtree_size_term(sp, sd):
      return If(
          self.__parent_grid[sp] == sd,
          self.__subtree_size_grid[sp],
          0
      )

    for p in self.__lattice.points:
      parent = self.__parent_grid[p]
      subtree_size_terms = [If(parent != X, 1, 0)]

      for d in self.__lattice.edge_sharing_directions():
        sp = p.translate(d.vector)
        if sp in self.__parent_grid:
          opposite_index = self.__edge_sharing_direction_to_index[
              self.__lattice.opposite_direction(d)]
          constrain_side(p, sp, opposite_index)
          subtree_size_terms.append(subtree_size_term(sp, opposite_index))
        else:
          d_index = self.__edge_sharing_direction_to_index[d]
          self.__solver.add(parent != d_index)

      self.__solver.add(
          self.__subtree_size_grid[p] == Sum(*subtree_size_terms)
      )
Example #21
    def add_indicator_resource_cost(
        self, list_of_resources: List[Resource]
    ) -> Indicator:
        """compute the total cost of a set of resources"""
        partial_costs = []

        def get_resource_cost(res):
            p = []
            for interv_low, interv_up in res.busy_intervals.values():
                # Constant cost per period
                if isinstance(res.cost, ConstantCostPerPeriod):
                    # res.cost(interv_up), res.cost(interv_low)
                    # or res.cost.value give the same result because the function is constant
                    period_cost = res.cost(interv_up) * (interv_up - interv_low)
                    p.append(period_cost)
                # Polynomial cost. Compute the area of the trapeze
                if isinstance(res.cost, PolynomialCostFunction):
                    period_cost = (
                        (res.cost(interv_low) + res.cost(interv_up))
                        * (interv_up - interv_low)
                        / 2
                    )
                    p.append(period_cost)
            return p

        for resource in list_of_resources:
            if isinstance(resource, CumulativeWorker):
                for res in resource.cumulative_workers:
                    partial_costs.extend(get_resource_cost(res))
            else:  # for a single worker
                partial_costs.extend(get_resource_cost(resource))

        resource_names = ",".join([resource.name for resource in list_of_resources])
        cost_indicator_variable = Sum(partial_costs)
        cost_indicator = Indicator(
            "Total Cost (%s)" % resource_names, cost_indicator_variable
        )
        return cost_indicator
Example #22
Lady in Room I = True

That is, the lady is in Room I.

'''

from z3 import Bool, And, Or, Not, Sum, If, Solver

p = Bool('Lady in Room I')  # Lady in Room I
q = Bool('Lady in Room II')  # Lady in Room II
r = Bool('Lady in Room III')  # Lady in Room III

solver = Solver()
solver.add(
    # The lady is in only one of the three rooms
    Sum([If(b, 1, 0) for b in [p, q, r]]) == 1,
    # At most one statement is true
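    # (each disjunct below fixes which one of the three door statements, if any,
    # is true; the statements have already been substituted in terms of p and q,
    # which is why some branches reduce to contradictions)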
    Or(
        # Room I is true
        And(Not(p), Not(q), q),
        # Room II is true
        And(p, q, q),
        # Room III is true
        And(p, Not(q), Not(q)),
        # All are false
        And(p, Not(q), q)))

solver.check()
model = solver.model()
[print(var, '= True') for var in model.decls() if model[var] == True]
print('\nThat is, the lady is in Room I.')
Example #23
    def solve(self) -> BinaryPuzzle:
        # Constants.
        size = self.size()

        # Our z3 Solver.
        solver = Solver()

        # Our mapping between an (x, y) position in our puzzle and the symbolic
        # variable that z3 is going to use to represent the integer value in
        # our matrix.
        symbols = {(x, y): Int("({}, {})".format(x, y))
                   for x, y in self.positions()}

        # We will often have to work with either rows of symbolic variables or
        # columns of symbolic variables. We start with rows:
        rows = []

        for x in range(size):
            row = [symbols[(x, y)] for y in range(size)]
            rows.append(row)

        # We define a list of columns:
        columns = []

        for y in range(size):
            column = [symbols[(x, y)] for x in range(size)]
            columns.append(column)

        # We start by adding the values from our puzzle to the solver.
        for x, y in self.positions():
            value = self._puzzle[x][y]

            # We skip our None values since they are the values we are
            # interested in finding.
            if value is None:
                continue

            # This would be equivalent to assigning a value to a variable in
            # the non-symbolic world.
            solver.add(symbols[(x, y)] == value)

        # We can now begin to add constraints to our model. The first and most
        # simple constraint we add is that each cell must either contain a 0 or
        # a 1 value. No other values are interesting if we are searching for
        # the result.
        for symbol in symbols.values():
            solver.add(Or([symbol == 0, symbol == 1]))

        # The second constraint we add, is to ensure that each row have the
        # exact same amount of zeroes and ones. We can exploit the nature of
        # our matrix here because we know that each row and column contain an
        # even amount of items. This allows us to simply check if half of the
        # elements are zeros (and thus we imply that the other half of the
        # elements contains ones). This simplifies the problem into simply
        # being that the sum of each element in a row must be half the size of
        # our row:
        for row in rows:
            solver.add(Sum(row) == size // 2)

        # We do the same trick for our columns:
        for column in columns:
            solver.add(Sum(column) == size // 2)

        # We need to add a constraint to make sure that each of our rows and
        # columns are unique. There must be a smarter way to do this.
        # We start with the rows:
        solver.add(
            Not(
                Or([
                    And([a == b for a, b in zip(row_a, row_b)])
                    for row_a in rows for row_b in rows if row_a != row_b
                ])))

        # We repeat the same for columns:
        solver.add(
            Not(
                Or([
                    And([a == b for a, b in zip(column_a, column_b)])
                    for column_a in columns for column_b in columns
                    if column_a != column_b
                ])))

        # The last constraint we need to add only applies for puzzles that are
        # larger than 2x2:
        if size > 2:
            # We need to check that in each row and each column that no more
            # than two of the same numbers are next or below each other. We can
            # build an overlapping window of each triplet in a given row or
            # column to simplify this.
            #
            # This gives us the following possible values of our triplets:
            #
            #   (0, 0, 0) => Illegal.
            #   (0, 0, 1) => Legal.
            #   (0, 1, 0) => Legal.
            #   (0, 1, 1) => Legal.
            #   (1, 0, 0) => Legal.
            #   (1, 0, 1) => Legal.
            #   (1, 1, 0) => Legal.
            #   (1, 1, 1) => Illegal.
            #
            # My first intuition here solved this issue as following: we can
            # now identify that we cannot allow an overlapping window where the
            # sum of the 3 elements in the window is either 0 or 3:
            #
            # But a much simpler method is that given each triplet (a, b, c) to
            # check that none of the them satisfies And([a == b, b == c]).
            #
            # We can model this easily in z3. We start with the rows:
            for row in rows:
                # We create our overlapping windows.
                for i in range(size - 2):
                    i_end = i + 3
                    a, b, c = row[i:i_end]
                    solver.add(Not(And([a == b, b == c])))

            # We do the same thing to our columns:
            for column in columns:
                # We create our overlapping windows.
                for i in range(size - 2):
                    i_end = i + 3
                    a, b, c = column[i:i_end]
                    solver.add(Not(And([a == b, b == c])))

        # Check if our solver can find a solution.
        if solver.check() != sat:
            raise UnsolvablePuzzleError

        # Our model.
        model = solver.model()

        # Evaluate our model and print the resulting grid.
        result = {
            position: model.evaluate(symbol)
            for position, symbol in symbols.items()
        }

        # Return our result in the same format as our input.
        return BinaryPuzzle([[result[(x, y)].as_long() for y in range(size)]
                             for x in range(size)])
Example #24
    def __init__(
        self,
        resource,
        dict_time_intervals_and_bound,
        kind: Optional[str] = "max",
        optional: Optional[bool] = False,
    ) -> None:
        """WorkLoad constraints can be used to restrict the number tasks which are executed during a certain time period.
        The resource can be a single Worker or a CumulativeWorker.

        The list of time_intervals is a dict such as:
        [(1,20):6, (50,60):2] which means: in the interval (1,20), the resource might not use
        more than 6 slots. And no more than 2 time slots in the interval (50, 60)

        kind: optional string, default to 'max', can be 'min' or 'exact'
        """
        super().__init__(optional)

        if kind not in ["exact", "max", "min"]:
            raise ValueError("kind must either be 'exact', 'min' or 'max'")

        if isinstance(resource, Worker):
            workers = [resource]
        elif isinstance(resource, CumulativeWorker):
            workers = resource.cumulative_workers

        for time_interval in dict_time_intervals_and_bound:
            number_of_time_slots = dict_time_intervals_and_bound[time_interval]

            time_interval_lower_bound, time_interval_upper_bound = time_interval

            durations = []

            for worker in workers:
                # for this task, compute how much of its busy interval
                # overlaps the workload time interval
                for start_task_i, end_task_i in worker.get_busy_intervals():
                    # this variable allows to compute the occupation
                    # of the resource during the time interval
                    dur = Int("Overlap_%i_%i_%s" % (
                        time_interval_lower_bound,
                        time_interval_upper_bound,
                        uuid.uuid4().hex[:8],
                    ))
                    # prevent solutions where duration would be negative
                    self.set_assertions(dur >= 0)
                    # 4 different cases to take into account
                    cond1 = And(
                        start_task_i >= time_interval_lower_bound,
                        end_task_i <= time_interval_upper_bound,
                    )
                    asst1 = Implies(cond1, dur == end_task_i - start_task_i)
                    self.set_assertions(asst1)
                    # overlap at lower bound
                    cond2 = And(
                        start_task_i < time_interval_lower_bound,
                        end_task_i > time_interval_lower_bound,
                    )
                    asst2 = Implies(
                        cond2, dur == end_task_i - time_interval_lower_bound)
                    self.set_assertions(asst2)
                    # overlap at upper bound
                    cond3 = And(
                        start_task_i < time_interval_upper_bound,
                        end_task_i > time_interval_upper_bound,
                    )
                    asst3 = Implies(
                        cond3, dur == time_interval_upper_bound - start_task_i)
                    self.set_assertions(asst3)
                    # all overlap
                    cond4 = And(
                        start_task_i < time_interval_lower_bound,
                        end_task_i > time_interval_upper_bound,
                    )
                    asst4 = Implies(
                        cond4,
                        dur == time_interval_upper_bound -
                        time_interval_lower_bound,
                    )
                    self.set_assertions(asst4)

                    # if none of the cases above applies, there is no overlap
                    self.set_assertions(
                        Implies(Not(Or([cond1, cond2, cond3, cond4])),
                                dur == 0))

                    # finally, store this variable in the durations list
                    durations.append(dur)

            # workload constraint depends on the kind
            if kind == "exact":
                wl_constrt = Sum(durations) == number_of_time_slots
            elif kind == "max":
                wl_constrt = Sum(durations) <= number_of_time_slots
            elif kind == "min":
                wl_constrt = Sum(durations) >= number_of_time_slots

            self.set_assertions(wl_constrt)
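
Note: a hedged usage sketch of the constraint above, assuming this __init__ belongs to a WorkLoad class in a ProcessScheduler-style API with a Worker resource (the class names are assumptions):

worker = Worker("Machine1")
# the worker may be busy for at most 6 time slots in the interval (1, 20)
# and for at most 2 time slots in the interval (50, 60)
WorkLoad(worker, {(1, 20): 6, (50, 60): 2}, kind="max")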
Example #25
# Bomb placement
def at_least_one_bomb_for_each_rectangle(h, w):
    return [
        PbGe([(is_bomb[r + dr][c + dc], 1) for (dr, dc) in rectangle(h, w)], 1)
        for (r, c) in rectangle(H - h + 1, W - w + 1)
    ]


# Clauses
s = Optimize()
s.add(at_least_one_bomb_for_each_rectangle(2, 3))
s.add(at_least_one_bomb_for_each_rectangle(3, 2))

# Objective
num_bombs = Sum([If(is_bomb[r][c], 1, 0) for (r, c) in rectangle(H, W)])
min_bombs = s.minimize(num_bombs)

if s.check() == sat:
    assert s.lower(min_bombs) == 6
    print("The minimum number of bombs satisfying the constraints == %s." %
          s.lower(min_bombs))
    print(diagram(s.model()))
else:
    print("Z3 failed to find a solution.")

# http://forum.stratego.com/topic/1134-stratego-quizz-and-training-forum/?p=11661
# http://forum.stratego.com/topic/1146-stratego-quizz-and-training-forum-answers/?p=11813
# http://forum.stratego.com/topic/1134-stratego-quizz-and-training-forum/?p=441745
print(
    "The minimum number of bombs on a Stratego setup area such that each 2x3, 3x2 and 1x6 rectangle has at least one bomb."
)
Example #26
def Count(boolvec):
    return Sum(*(If(i, 1, 0) for i in boolvec))
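
A small usage sketch of Count (the Boolean names below are made up for illustration, and the z3 imports used by Count are assumed to be in scope): require exactly two of four flags to be set.

from z3 import Bools, Solver, sat

a, b, c, d = Bools("a b c d")
s = Solver()
s.add(Count([a, b, c, d]) == 2)
if s.check() == sat:
    print(s.model())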
Example #27
def SMT_general_model(instance):
    # --------------------------
    #         PARAMETERS
    # --------------------------

    # Define dimensions parameters
    x = 0
    y = 1

    # Define wrapping paper roll height and width
    roll_width = instance['roll_width']
    roll_height = instance['roll_height']

    # Define wrapping paper roll coordinates
    X_COORDINATES = range(roll_width)
    Y_COORDINATES = range(roll_height)

    # Define the number of pieces to cut
    n_pieces = instance['n_pieces']

    # Define pieces as a set of integers from 0 to num. of presents-1
    PIECES = range(n_pieces)

    # Define the dimensions of the pieces to cut
    pieces_dimensions = instance['pieces_dimensions']

    # Define lower and upper bounds for the dimensions
    lower_bounds = [0, 0]
    upper_bounds = [roll_width, roll_height]

    # --------------------------
    #         VARIABLES
    # --------------------------

    # DECISION VARIABLES

    # Define bottom-left corner of the pieces of paper to cut as 2-D (width-height) array of int decision variables
    pieces_corners = [[Int("x_%s" % i), Int("y_%s" % i)] for i in PIECES]

    # Define rotation property for the pieces of paper
    pieces_rotation = [Bool("rotation_%s" % i) for i in PIECES]

    # --------------------------
    #         FUNCTIONS
    # --------------------------

    # Function to obtain the width and height of a piece of paper based on their positioning (if rotated or not)
    def get_dimension(i, axis):
        if axis == x:
            return If(pieces_rotation[i], pieces_dimensions[i][y],
                      pieces_dimensions[i][x])
        else:
            return If(pieces_rotation[i], pieces_dimensions[i][x],
                      pieces_dimensions[i][y])

    # --------------------------
    #        CONSTRAINTS
    # --------------------------

    # DOMAIN CONSTRAINTS: reduce the domain for the bottom-left corners of the pieces of paper

    # The cut can not be done outside the paper roll: the bottom-left corner coordinates of the pieces of paper to cut
    # must not exceed the paper roll coordinates limit, considering also the dimension of the piece of paper
    domain_bound_constraints = [
        And(
            And(pieces_corners[i][x] >= lower_bounds[x],
                pieces_corners[i][x] <= upper_bounds[x] - get_dimension(i, x)),
            And(pieces_corners[i][y] >= lower_bounds[y],
                pieces_corners[i][y] <= upper_bounds[y] - get_dimension(i, y)))
        for i in PIECES
    ]

    # IMPLIED CUMULATIVE CONSTRAINTS: define the maximum number of usable paper

    # The maximum usable quantity of paper is defined by the paper roll dimensions
    cumulative_constraints = [
        Sum([
            If(
                And(y_coord >= pieces_corners[i][y],
                    y_coord < pieces_corners[i][y] + get_dimension(i, y)),
                get_dimension(i, x), 0) for i in PIECES
        ]) == roll_width for y_coord in Y_COORDINATES
    ] + [
        Sum([
            If(
                And(x_coord >= pieces_corners[i][x],
                    x_coord < pieces_corners[i][x] + get_dimension(i, x)),
                get_dimension(i, y), 0) for i in PIECES
        ]) == roll_height for x_coord in X_COORDINATES
    ]

    # NON-OVERLAPPING CONSTRAINT: define the non-overlapping property of the pieces of paper

    # The cut pieces of paper must not overlap: the bottom-left corner coordinates must not be equal to other
    # coordinates of the paper roll which are already occupied by other pieces of paper

    non_overlapping_constraints = [
        Or(pieces_corners[i][x] + get_dimension(i, x) <= pieces_corners[j][x],
           pieces_corners[i][y] + get_dimension(i, y) <= pieces_corners[j][y],
           pieces_corners[j][x] + get_dimension(j, x) <= pieces_corners[i][x],
           pieces_corners[j][y] + get_dimension(j, y) <= pieces_corners[i][y])
        for i in PIECES for j in PIECES if i < j
    ]

    # ORDERING CONSTRAINTS: define an ordering property for the pieces of paper which have the same dimension

    # The pieces of the same dimension must be ordered in order to reduce the number of solutions
    same_dimension_constraint = [
        lex_less(pieces_corners[i], pieces_corners[j]) for i in PIECES
        for j in PIECES
        if i < j and ((pieces_dimensions[i][x] == pieces_dimensions[j][y] and
                       pieces_dimensions[i][y] == pieces_dimensions[j][x]) or
                      (pieces_dimensions[i][x] == pieces_dimensions[j][x]
                       and pieces_dimensions[i][y] == pieces_dimensions[j][y]))
    ]

    # Same constraint for the problem where rotation of pieces is not an option
    # same_dimension_constraint = [lex_less(pieces_corners[i], pieces_corners[j])
    #                              for i in PIECES for j in PIECES if i < j and
    #                              pieces_dimensions[i] == pieces_dimensions[j]]

    # OPTIMIZATION CONSTRAINTS: constraint to speed up the search of solutions

    # If a piece is square (width == height), do not consider the solution with the piece rotated
    square_pieces_constraint = [
        Not(pieces_rotation[i]) for i in PIECES
        if pieces_dimensions[i][x] == pieces_dimensions[i][y]
    ]

    # --------------------------
    #          SOLUTION
    # --------------------------

    solver = Solver()
    solver.add(domain_bound_constraints + cumulative_constraints +
               non_overlapping_constraints + same_dimension_constraint +
               square_pieces_constraint)

    return solver, PIECES, pieces_corners, pieces_rotation
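
Note: lex_less is referenced but not defined in this snippet. A minimal sketch of a lexicographic ordering over the [x, y] corner pairs (an assumption about the original helper):

def lex_less(a, b):
    # a and b are [x, y] pairs of z3 Int variables; non-strict on the second
    # component, which is enough for symmetry breaking here
    return Or(a[0] < b[0], And(a[0] == b[0], a[1] <= b[1]))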
Example #28
    def refine_oracle_mdp(self, visited_states: Set[StateId]) -> Set[StateId]:

        self.statistics.inc_refine_oracle_counter()
        # First ensure progress
        if visited_states <= self.oracle_states:
            # Ensure progress by adding all non-target successors of states in oracle_states to the set (for every action)
            self.oracle_states = self.oracle_states.union({
                succ[0]
                for state_id in self.oracle_states for choice in
                self.state_graph.get_successors_filtered(state_id).choices
                for succ in choice.distribution if succ[0] != -1
            })

        else:
            self.oracle_states = self.oracle_states.union(visited_states)

        # TODO: A lot of optimization potential
        self.solver_mdp.push()

        # We need a variable for every oracle state
        variables = {
            state_id: Real("x_%s" % state_id)
            for state_id in self.oracle_states
        }

        # Set up EQ - System
        for state_id in self.oracle_states:
            for choice in self.state_graph.get_successors_filtered(
                    state_id).choices:
                self.solver_mdp.add(variables[state_id] >= Sum([
                    RealVal(1) *
                    prob if succ_id == -1 else  # Case succ_id target state
                    (
                        variables[succ_id] * prob if succ_id in
                        self.oracle_states else  # Case succ_id oracle state
                        self.get_oracle_value(succ_id) *
                        prob)  # Case succ_id neither target nor oracle state
                    for succ_id, prob in choice.distribution
                ]))

            self.solver_mdp.add(variables[state_id] >= RealVal(0))

        # Minimize value for initial state
        self.solver_mdp.minimize(
            variables[self.state_graph.get_initial_state_id()])

        if self.solver_mdp.check() == sat:

            m = self.solver_mdp.model()

            # update oracle
            for state_id in self.oracle_states:
                self.oracle[state_id] = m[variables[state_id]]

            logger.info("Refined oracle.")
            # logger.info(self.oracle)

            self.solver_mdp.pop()

            return self.oracle_states

        else:
            logger.error("Oracle solver unsat")
            raise RuntimeError("Oracle solver inconsistent.")
Example #29
    def refine_oracle_mc(self, visited_states: Set[StateId]) -> Set[StateId]:

        self.statistics.inc_refine_oracle_counter()
        # First ensure progress
        if visited_states <= self.oracle_states:
            # Ensure progress by adding all non-target successors of states in oracle_states to the set
            self.oracle_states = self.oracle_states.union({
                succ_id
                for state_id in self.oracle_states for succ_id, prob in
                self.state_graph.get_filtered_successors(state_id)
                if succ_id != -1
            })

        else:
            self.oracle_states = self.oracle_states.union(visited_states)

        # TODO: A lot of optimization potential
        self.solver.push()

        # We need a variable for every oracle state
        variables = {
            state_id: Real("x_%s" % state_id)
            for state_id in self.oracle_states
        }

        # Set up EQ - System
        for state_id in self.oracle_states:
            self.solver.add(variables[state_id] == Sum([
                RealVal(1) *
                prob if succ_id == -1 else  # Case succ_id target state
                (
                    variables[succ_id] * prob if succ_id in
                    self.oracle_states else  # Case succ_id oracle state
                    self.get_oracle_value(succ_id) *
                    prob)  # Case succ_id neither target nor oracle state
                for succ_id, prob in self.state_graph.get_filtered_successors(
                    state_id)
            ]))

            self.solver.add(variables[state_id] >= RealVal(0))

        #print(self.solver.assertions())

        if self.solver.check() == sat:

            m = self.solver.model()

            # update oracle
            for state_id in self.oracle_states:
                self.oracle[state_id] = m[variables[state_id]]

            logger.info("Refined oracle.")
            #logger.info(self.oracle)

            self.solver.pop()

            return self.oracle_states

        else:

            # The oracle solver is unsat. In this case, we solve the LP.
            self.solver.pop()

            self.statistics.refine_oracle_counter = self.statistics.refine_oracle_counter - 1

            return self.refine_oracle_mdp(visited_states)
Example #30
    def __init__(
        self,
        problem,
        debug: Optional[bool] = False,
        max_time: Optional[int] = 10,
        parallel: Optional[bool] = False,
        random_values: Optional[bool] = False,
        logics: Optional[str] = None,
        verbosity: Optional[int] = 0,
    ):
        """Scheduling Solver

        debug: True or False, False by default
        max_time: time in seconds, 10 by default
        parallel: True to enable multithreading, False by default
        """
        self.problem = problem
        self.problem_context = problem.context
        self.debug = debug
        # objective
        self.objective = None  # the objective for this problem, if any
        self.current_solution = None  # no solution until the problem is solved

        # set_option('smt.arith.auto_config_simplex', True)
        if debug:
            set_option("verbose", 2)
        else:
            set_option("verbose", verbosity)

        if random_values:
            set_option("sat.random_seed", random.randint(1, 1e3))
            set_option("smt.random_seed", random.randint(1, 1e3))
            set_option("smt.arith.random_initial_value", True)
        else:
            set_option("sat.random_seed", 0)
            set_option("smt.random_seed", 0)
            set_option("smt.arith.random_initial_value", False)

        # set timeout
        self.max_time = max_time  # in seconds
        set_option("timeout", int(self.max_time * 1000))  # in milliseconds

        # create the solver
        print("Solver type:\n===========")

        # check if the problem is an optimization problem
        self.is_not_optimization_problem = len(
            self.problem_context.objectives) == 0
        self.is_optimization_problem = len(self.problem_context.objectives) > 0
        self.is_multi_objective_optimization_problem = (len(
            self.problem_context.objectives) > 1)
        # the Optimize() solver is used only in the case of a multi-objective
        # optimization problem. This makes it possible to choose the priority method.
        # In the case of a single objective optimization, the Optimize() solver
        # appears to be less robust than the basic Solver(). The
        # incremental solver is then used.

        # see this url for a documentation about logics
        # http://smtlib.cs.uiowa.edu/logics.shtml
        if logics is None:
            self._solver = Solver()
            print("\t-> Standard SAT/SMT solver")
        else:
            self._solver = SolverFor(logics)
            print("\t-> SMT solver using logics", logics)
        if debug:
            set_option(unsat_core=True)

        if parallel:
            set_option("parallel.enable", True)  # enable parallel computation

        # add all tasks assertions to the solver
        for task in self.problem_context.tasks:
            self.add_constraint(task.get_assertions())
            self.add_constraint(task.end <= self.problem.horizon)

        # then process tasks constraints
        for constraint in self.problem_context.constraints:
            self.add_constraint(constraint)

        # process resources requirements
        for ress in self.problem_context.resources:
            self.add_constraint(ress.get_assertions())

        # process resource intervals
        for ress in self.problem_context.resources:
            busy_intervals = ress.get_busy_intervals()
            nb_intervals = len(busy_intervals)
            for i in range(nb_intervals):
                start_task_i, end_task_i = busy_intervals[i]
                for k in range(i + 1, nb_intervals):
                    start_task_k, end_task_k = busy_intervals[k]
                    self.add_constraint(
                        Or(start_task_k >= end_task_i,
                           start_task_i >= end_task_k))

        # process indicators
        for indic in self.problem_context.indicators:
            self.add_constraint(indic.get_assertions())

        # work amounts
        # for each task, compute the total work for all required resources
        for task in self.problem_context.tasks:
            if task.work_amount > 0.0:
                work_total_for_all_resources = []
                for required_resource in task.required_resources:
                    # work contribution for the resource
                    interv_low, interv_up = required_resource.busy_intervals[
                        task]
                    work_contribution = required_resource.productivity * (
                        interv_up - interv_low)
                    work_total_for_all_resources.append(work_contribution)
                self.add_constraint(
                    Sum(work_total_for_all_resources) >= task.work_amount)

        # process buffers
        for buffer in self.problem_context.buffers:
            #
            # create an array that stores the mapping between start times and
            # quantities. For example, if a task T1 starts at 2 and consumes
            # 8, and a task T3 ends at 6 and feeds 5, then the mapping array
            # will look like: A[2] = -8 and A[6] = +5.
            # So far, there is no way to handle two state changes at the same time instant.
            buffer_mapping = Array("Buffer_%s_mapping" % buffer.name,
                                   IntSort(), IntSort())
            for t in buffer.unloading_tasks:
                self.add_constraint(buffer_mapping == Store(
                    buffer_mapping, t.start, -buffer.unloading_tasks[t]))
            for t in buffer.loading_tasks:
                self.add_constraint(buffer_mapping == Store(
                    buffer_mapping, t.end, +buffer.loading_tasks[t]))
            # sort consume/feed times in asc order
            tasks_start_unload = [t.start for t in buffer.unloading_tasks]
            tasks_end_load = [t.end for t in buffer.loading_tasks]

            sorted_times, sort_assertions = sort_no_duplicates(
                tasks_start_unload + tasks_end_load)
            self.add_constraint(sort_assertions)
            # create as many buffer state changes as sorted_times
            buffer.state_changes_time = [
                Int("%s_sc_time_%i" % (buffer.name, k))
                for k in range(len(sorted_times))
            ]

            # add the constraints that give the buffer state change times
            for st, bfst in zip(sorted_times, buffer.state_changes_time):
                self.add_constraint(st == bfst)

            # compute the different buffer states according to state changes
            buffer.buffer_states = [
                Int("%s_state_%i" % (buffer.name, k))
                for k in range(len(buffer.state_changes_time) + 1)
            ]
            # add constraints for buffer states
            # the first buffer state is equal to the buffer initial level
            if buffer.initial_state is not None:
                self.add_constraint(
                    buffer.buffer_states[0] == buffer.initial_state)
            if buffer.final_state is not None:
                self.add_constraint(
                    buffer.buffer_states[-1] == buffer.final_state)
            if buffer.lower_bound is not None:
                for st in buffer.buffer_states:
                    self.add_constraint(st >= buffer.lower_bound)
            if buffer.upper_bound is not None:
                for st in buffer.buffer_states:
                    self.add_constraint(st <= buffer.upper_bound)
            # and, for the other, the buffer state i+1 is the buffer state i +/- the buffer change
            for i in range(len(buffer.buffer_states) - 1):
                self.add_constraint(
                    buffer.buffer_states[i + 1] == buffer.buffer_states[i] +
                    buffer_mapping[buffer.state_changes_time[i]])

        # optimization
        if self.is_optimization_problem:
            self.create_objective()
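
Note: the buffer handling above relies on a sort_no_duplicates helper that is not shown. A minimal sketch of one way to express it with z3, assuming the input time points are constrained to be pairwise distinct elsewhere (an illustration, not the original implementation):

def sort_no_duplicates(z3_int_list):
    n = len(z3_int_list)
    sorted_vars = [Int("sorted_%i" % i) for i in range(n)]
    assertions = []
    # the outputs are strictly increasing
    for i in range(n - 1):
        assertions.append(sorted_vars[i] < sorted_vars[i + 1])
    # every output takes one of the input values, and every input value
    # appears among the outputs
    for s in sorted_vars:
        assertions.append(Or([s == v for v in z3_int_list]))
    for v in z3_int_list:
        assertions.append(Or([v == s for s in sorted_vars]))
    return sorted_vars, assertions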