Пример #1
0
def define_network():
    '''
    Build a tiny Marabou query: variable 1 is the ReLU of variable 0 and
    variable 2 (y) is constrained to equal variable 1.

    Variables: 0 = x in [-1, 1]; 1 = relu output in [1, 2]; 2 = y in [-large, large].
    :return: MarabouCore.InputQuery describing the network
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(3)

    # x
    network.setLowerBound(0, -1)
    network.setUpperBound(0, 1)

    # relu output
    network.setLowerBound(1, 1)
    network.setUpperBound(1, 2)

    # y
    network.setLowerBound(2, -large)
    network.setUpperBound(2, large)

    # variable 1 = ReLU(variable 0)
    MarabouCore.addReluConstraint(network, 0, 1)

    # y - relu(x) = 0 (equality equation; the original comment claimed ">= 0")
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, 2)
    output_equation.addAddend(-1, 1)
    output_equation.setScalar(0)
    network.addEquation(output_equation)

    # NOTE(review): the original also built a property equation (var 1 <= 3)
    # that was never added to the query nor returned; removed as dead code.
    return network
Пример #2
0
    def set_input_bounds_template(self, xlim: list, radius: float):
        '''
        Constrain every input variable to an l_infinity ball around a point
        that grows linearly with the rnn loop counter i.
        For example if xlim is [(5,3)], and radius is 0.1, the limit will be:
        0.9 * (5i + 3) <= x[0] <= 1.1 * (5i + 3)
        :param xlim: list of tuples, each tuple is (alpha, beta) which will be used as alpha * i + beta
        :param radius: non negative number, l_infinity around each of the points
        '''
        assert radius >= 0
        assert len(xlim) == len(self.input_idx)
        assert len(self._rnn_loop_idx) > 0
        upper_scale = 1 + radius  # scales the upper limit
        lower_scale = 1 - radius  # scales the lower limit
        loop_var = self._rnn_loop_idx[0]
        for (alpha, beta), var_idx in zip(xlim, self.input_idx):
            self.network.setLowerBound(var_idx, -LARGE)
            self.network.setUpperBound(var_idx, LARGE)
            # TODO: What if alpha / beta == 0?
            # Upper limit: x <= (1+r)*(alpha*i + beta) <--> x - (1+r)*alpha*i <= (1+r)*beta
            upper_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
            upper_eq.addAddend(1, var_idx)
            upper_eq.addAddend(-upper_scale * alpha, loop_var)
            upper_eq.setScalar(upper_scale * beta)
            self.network.addEquation(upper_eq)

            # Lower limit: x >= (1-r)*(alpha*i + beta) <--> x - (1-r)*alpha*i >= (1-r)*beta
            lower_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
            lower_eq.addAddend(1, var_idx)
            lower_eq.addAddend(-lower_scale * alpha, loop_var)
            lower_eq.setScalar(lower_scale * beta)
            self.network.addEquation(lower_eq)
Пример #3
0
def add_hidden_state_equations(inputQuery, variables_first_index, input_weight, hidden_weight, num_iterations):
    '''
    Append all hidden-state equations to the query:
        input_weight * x1 = s1b
        for each k > 1:
            input_weight * xk + hidden_weight * s(k-1)f = skb
    plus the ReLU pairing of every b/f variable couple.
    :param inputQuery: query to append to
    :param variables_first_index: the first index of the hidden vector variable
    :param input_weight: the weight in the input
    :param hidden_weight: the weight for the hidden vector
    :param num_iterations: number of iterations
    :return: None (inputQuery is mutated in place)
    '''
    # First step has no previous hidden value: input_weight * x0 - s1b = 0
    first_eq = MarabouCore.Equation()
    first_eq.addAddend(input_weight, 0)
    first_eq.addAddend(-1, variables_first_index)
    first_eq.setScalar(0)
    inputQuery.addEquation(first_eq)

    # Every later step also feeds in the previous forward value
    for step in range(1, num_iterations):
        step_eq = MarabouCore.Equation()
        step_eq.addAddend(input_weight, step)  # x_step
        step_eq.addAddend(hidden_weight, variables_first_index + 2 * step - 1)  # s(step-1)f
        step_eq.addAddend(-1, variables_first_index + 2 * step)  # s_step b
        step_eq.setScalar(0)
        inputQuery.addEquation(step_eq)

    # Pair each b variable with the f variable right after it through a ReLU
    for b_idx in range(variables_first_index, variables_first_index + 2 * num_iterations, 2):
        MarabouCore.addReluConstraint(inputQuery, b_idx, b_idx + 1)
Пример #4
0
def define_positive_sum_network(xlim=(-1, 1)):
    '''
    Defines the positive_sum network in a marabou way, without the recurrent part
    i.e. we define:
        s_i b = s_i-1 f + x_i
        y = s_i f
    :param xlim: how to limit the input to the network
    :return: query to marabou that defines the positive_sum rnn network (without recurent)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(5)  # x, s_i-1 f, s_i b, s_i f, y

    # 0: x -- the network input
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])
    # 1: s_i-1 f -- previous forward value (a.k.a. temp), non-negative
    query.setLowerBound(1, 0)
    query.setUpperBound(1, large)
    # 2: s_i b -- pre-activation, unbounded
    query.setLowerBound(2, -large)
    query.setUpperBound(2, large)
    # 3: s_i f -- post-ReLU value, non-negative
    query.setLowerBound(3, 0)
    query.setUpperBound(3, large)
    # 4: y -- the output, unbounded
    query.setLowerBound(4, -large)
    query.setUpperBound(4, large)

    # s_i b = x_i + s_i-1 f  <-->  x + s_i-1 f - s_i b = 0
    step_eq = MarabouCore.Equation()
    step_eq.addAddend(1, 0)
    step_eq.addAddend(1, 1)
    step_eq.addAddend(-1, 2)
    step_eq.setScalar(0)
    query.addEquation(step_eq)

    # s_i f = ReLu(s_i b)
    MarabouCore.addReluConstraint(query, 2, 3)

    # y = s_i f  <-->  y - s_i f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, 4)
    out_eq.addAddend(-1, 3)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    return query
Пример #5
0
def define_positive_sum_network_no_invariant(xlim, ylim, n_iterations):
    '''
    Defines the positive_sum network in a marabou way
        s_i = ReLu(1 * x_i + 1 * s_i-1)
        y = s_k (where k == n_iterations)
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the positive_sum rnn network (without recurent)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # only x for now

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # index of the cell iterator i
    # The cell computes s_i = ReLu(1 * x + 1 * s_i-1); the returned index is s_i f
    rnn_idx = add_rnn_cell(query, [(0, 1)], 1, n_iterations, print_debug=1)
    y_idx = rnn_idx + 1

    query.setNumberOfVariables(y_idx + 1)

    # y, unbounded
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y = s_k f  <-->  y - s_k f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # Property to check: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    # First-iteration output range, obtained by pushing the x limits through
    # the cell: relu(relu(x * 1) * 1) == max(max(x, 0), 0)
    min_y = max(max(xlim[0], 0), 0)
    max_y = max(max(xlim[1], 0), 0)
    initial_values = [[min_y], [max_y]]

    return query, [rnn_start_idx], None, [negate_equation(property_eq)], initial_values
Пример #6
0
def adversarial_query(x: list, radius: float, y_idx_max: int, other_idx: int, h5_file_path: str, algorithm_ptr,
                      n_iterations=10, steps_num=5000):
    '''
    Query marabou with adversarial query
    :param x: base_vector (input vector that we want to find a ball around it)
    :param radius: determines the limit of the inputs around the base_vector
    :param y_idx_max: max index in the output layer (None to derive it from the model)
    :param other_idx: which index to compare max idx (None to derive it from the model)
    :param h5_file_path: path to keras model which we will check on
    :param algorithm_ptr: factory called as algorithm_ptr(rnn_model, xlim) to build the proving algorithm
    :param n_iterations: number of iterations to run
    :param steps_num: bound on the number of steps passed to prove_multidim_property
    :return: True / False, queries_stats, algorithm.alpha_history
    '''

    if y_idx_max is None or other_idx is None:
        y_idx_max, other_idx = get_out_idx(x, n_iterations, h5_file_path)
        if y_idx_max == other_idx or y_idx_max is None or other_idx is None:
            # This means all the entries in the out vector are equal...
            return False, None, None

    xlim = calc_min_max_by_radius(x, radius)
    rnn_model = RnnMarabouModel(h5_file_path, n_iterations)
    rnn_model.set_input_bounds(xlim)

    # output[y_idx_max] >= output[other_idx] <-> output[y_idx_max] - output[other_idx] >= 0,
    # before feeding marabou we negate this
    adv_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    adv_eq.addAddend(-1, rnn_model.output_idx[other_idx])
    adv_eq.addAddend(1, rnn_model.output_idx[y_idx_max])
    adv_eq.setScalar(0)

    # Pin the iterator variable of the first rnn layer to n_iterations
    time_eq = MarabouCore.Equation()
    time_eq.addAddend(1, rnn_model.get_start_end_idxs(0)[0][0])
    time_eq.setScalar(n_iterations)

    start_initial_alg = timer()
    algorithm = algorithm_ptr(rnn_model, xlim)
    end_initial_alg = timer()
    # rnn_model.network.dump()

    res, queries_stats = prove_multidim_property(rnn_model, [negate_equation(adv_eq), time_eq], algorithm, debug=1,
                                                 return_queries_stats=True, number_of_steps=steps_num)
    if queries_stats:
        step_times = queries_stats['step_times']['raw']
        # Count algorithm construction time as the first "step"
        step_times.insert(0, end_initial_alg - start_initial_alg)
        queries_stats['step_times'] = {'avg': np.mean(step_times), 'median': np.median(step_times), 'raw': step_times}
        queries_stats['step_queries'] = len(step_times)

        # BUGFIX: this sanity check previously ran even when queries_stats was
        # None, raising TypeError on the `in` test; it only makes sense here.
        if 'invariant_queries' in queries_stats and 'property_queries' in queries_stats and \
                queries_stats['property_queries'] != queries_stats['invariant_queries']:
            print("What happened?\n", x)
    return res, queries_stats, algorithm.alpha_history
Пример #7
0
def define_zero_network(xlim, ylim, n_iterations):
    '''
    Defines the zero network in a marabou way
    The zero network is a network with two rnn cells, that always outputs zero
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network, will effect how we create the invariant
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the positive_sum rnn network (without recurrent)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # x

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    # Two identical cells, s and z, each computing ReLu(1 * x + 1 * prev)
    s_cell_iterator = 1  # iterator variable of the first cell
    s_i_f_idx = add_rnn_cell(query, [(0, 1)], 1, n_iterations)
    z_cell_iterator = query.getNumberOfVariables()  # iterator of the second cell
    z_i_f_idx = add_rnn_cell(query, [(0, 1)], 1, n_iterations)
    y_idx = z_i_f_idx + 1

    query.setNumberOfVariables(y_idx + 1)

    # y, unbounded
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y = skf - zkf <--> y - skf + zkf = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, s_i_f_idx)
    out_eq.addAddend(1, z_i_f_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # Invariant: s_i f - z_i f <= SMALL
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(-1, z_i_f_idx)  # z_i f
    invariant_equation.addAddend(1, s_i_f_idx)  # s_i f
    invariant_equation.setScalar(SMALL)

    # Property: y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim)

    return query, [s_cell_iterator, z_cell_iterator], invariant_equation, [property_eq]
Пример #8
0
def define_negative_sum_network(xlim, ylim, n_iterations):
    '''
    Defines the negative network in a marabou way
        s_i = ReLu(-1 * x_i + s_i-1)
        y = s_k (where k == n_iterations)
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the positive_sum rnn network (without recurrent)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # x

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # iterator variable i of the cell
    # Cell: s_i = ReLu(-1 * x + 1 * s_i-1); the returned index is s_i f
    rnn_idx = add_rnn_cell(query, [(0, -1)], 1, n_iterations)
    y_idx = rnn_idx + 1

    query.setNumberOfVariables(y_idx + 1)

    # y, unbounded
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y = s_k f  <-->  y - s_k f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # Invariant: s_i f <= i + 1 <--> i - s_i f >= -1
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_equation.addAddend(1, rnn_start_idx)  # i
    invariant_equation.addAddend(-1, rnn_idx)  # s_i f
    invariant_equation.setScalar(-1)

    # Property: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return query, [rnn_start_idx], invariant_equation, [property_eq]
Пример #9
0
def define_two_sum_network(xlim, ylim, n_ierations):
    '''
    The network gets a series of numbers and outputs two neurons, one sums the positive numbers and the other
    the negative
    NOTE(review): the last parameter is spelled n_ierations (sic); kept as-is
    for backward compatibility with keyword callers.
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_ierations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the positive_sum rnn network (without recurent)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # x

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # iterator variable i of the cell
    # Cell: s_i = ReLu(1 * x + 1 * s_i-1); the returned index is s_i f
    rnn_idx = add_rnn_cell(query, [(0, 1)], 1, n_ierations)
    y_idx = rnn_idx + 1

    query.setNumberOfVariables(y_idx + 1)

    # y, unbounded
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y = s_k f  <-->  y - s_k f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # Invariant: s_i f <= i <--> i - s_i f >= 0
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_equation.addAddend(1, rnn_start_idx)  # i
    invariant_equation.addAddend(-1, rnn_idx)  # s_i f
    invariant_equation.setScalar(0)

    # Property: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return query, [rnn_start_idx], invariant_equation, [property_eq]
Пример #10
0
def test_negate_equation_LE():
    # Original equation: x + y <= 1
    eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    eq.addAddend(1, 0)
    eq.addAddend(1, 1)
    eq.setScalar(1)

    # Expected negation: x + y >= 1 + epsilon
    expected = MarabouCore.Equation(MarabouCore.Equation.GE)
    expected.addAddend(1, 0)
    expected.addAddend(1, 1)
    expected.setScalar(1 + SMALL)

    negated = negate_equation(eq)
    assert negated.equivalent(expected)
    # The negation must differ from the original
    assert not eq.equivalent(expected)
Пример #11
0
def test_negate_equation_GE():
    # Original equation: x - y >= 0
    eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    eq.addAddend(1, 1)
    eq.addAddend(-1, 0)
    eq.setScalar(0)

    # Expected negation: x - y <= -epsilon
    expected = MarabouCore.Equation(MarabouCore.Equation.LE)
    expected.addAddend(1, 1)
    expected.addAddend(-1, 0)
    expected.setScalar(-SMALL)

    negated = negate_equation(eq)
    assert negated.equivalent(expected)
    # The negation must differ from the original
    assert not eq.equivalent(expected)
Пример #12
0
        def add_intermediate_layer_equations():
            '''
            Allocate a b/f variable pair per neuron of this layer, constrain
            f = ReLu(b) and wire b to prev_layer_idx via output_weights and
            output_bias_weights (taken from the enclosing scope).
            :return: range of the f (post-ReLU) variable indices
            '''
            first_idx = self.network.getNumberOfVariables()
            n_new_vars = output_weights.shape[1] * 2  # a b and an f variable per neuron
            self.network.setNumberOfVariables(first_idx + n_new_vars)
            b_indices = range(first_idx, first_idx + n_new_vars, 2)
            f_indices = range(first_idx + 1, first_idx + n_new_vars, 2)
            for neuron, (b_idx, f_idx) in enumerate(zip(b_indices, f_indices)):
                # b is unbounded; f is the ReLU output, hence non-negative
                self.network.setLowerBound(b_idx, -LARGE)
                self.network.setUpperBound(b_idx, LARGE)
                self.network.setLowerBound(f_idx, 0)
                self.network.setUpperBound(f_idx, LARGE)
                MarabouCore.addReluConstraint(self.network, b_idx, f_idx)

                # sum_j w_j * prev_j - b = -bias  (i.e. b = w . prev + bias)
                eq = MarabouCore.Equation()
                for j, w in enumerate(output_weights[:, neuron]):
                    eq.addAddend(w, prev_layer_idx[j])
                eq.setScalar(-output_bias_weights[neuron])
                eq.addAddend(-1, b_idx)
                self.network.addEquation(eq)
            return f_indices
Пример #13
0
def boundEqConflict():
    '''
    Simple precision example.
    Only two nodes that are connected with ReLU, and an equation that asks
    whether the ReLU output can be a very small negative number.
    :return: True if Marabou answers UNSAT (the expected result), False on SAT
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(2)

    # variable 0: the ReLU input
    network.setLowerBound(0, -5)
    network.setUpperBound(0, 5)
    # variable 1: the ReLU output (non-negative by construction)
    network.setLowerBound(1, 0)
    network.setUpperBound(1, 5)
    MarabouCore.addReluConstraint(network, 0, 1)

    # Ask for relu output <= -1e-4, contradicting its lower bound of 0
    eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    eq.addAddend(1, 1)
    eq.setScalar(-10**-4)  # -10 ** -4 works
    network.addEquation(eq)

    verbose = 2
    vars1, stats1 = MarabouCore.solve(network, "", 0, verbose)
    if not vars1:
        print("UNSAT")
        return True
    print("SAT")
    print(vars1)
    return False
Пример #14
0
    def prove_rnn_max_property(self, img_patch, rnn_out_idx, max_value, n):
        '''
        Prove a max-value property on one of the rnn outputs
        :param img_patch: input patch; a fixed default pattern is used when None
        :param rnn_out_idx: one of rnn output idx
        :param max_value: max value for the output
        :param n: number of iterations
        :return: result of prove_multidim_property
        '''
        if img_patch is None:
            img_patch = np.array([0.1, 0.2, 0.3, 0.4] * 28)  # 112 entries
        img_patch = img_patch[:MAX_SIZE]

        self.set_network_description(img_patch, n)

        # Property equation: rnn output >= max_value
        property_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
        property_eq.addAddend(1, self.rnn_output_idxs[rnn_out_idx])
        property_eq.setScalar(max_value)

        # Each cell's iterator variable sits 3 indices before its output
        rnn_start_idxs = [out_idx - 3 for out_idx in self.rnn_output_idxs]
        algorithm = IterateAlphasSGD(self.rnn_initial_values, rnn_start_idxs, self.rnn_output_idxs)
        return prove_multidim_property(self.network, rnn_start_idxs, self.rnn_output_idxs,
                                       [property_eq], algorithm)
Пример #15
0
def simplify_network_using_invariants(network_define_f, xlim, ylim,
                                      n_iterations):
    '''
    Prove each invariant equation of a network in order, adding every proven
    invariant to the query as a hypothesis before proving the next one.
    :param network_define_f: factory called as network_define_f(xlim, ylim, n_iterations),
        must return (network, rnn_start_idxs, invariant_equation, ...)
    :param xlim: input limits forwarded to the factory
    :param ylim: output limits forwarded to the factory
    :param n_iterations: number of rnn iterations forwarded to the factory
    :return: True if every invariant was proven, False on the first failure
    '''
    network, rnn_start_idxs, invariant_equation, *_ = network_define_f(
        xlim, ylim, n_iterations)

    # Tie all rnn iterator variables together: add idx - idx2 equations for
    # every ordered pair.
    # NOTE(review): temp_eq never calls setScalar -- presumably MarabouCore's
    # default scalar is 0, making these equality constraints; confirm.
    for idx in rnn_start_idxs:
        for idx2 in rnn_start_idxs:
            if idx != idx2:
                temp_eq = MarabouCore.Equation()
                temp_eq.addAddend(1, idx)
                temp_eq.addAddend(-1, idx2)
                network.addEquation(temp_eq)

    # Normalize a single invariant into a one-element list
    if not isinstance(invariant_equation, list):
        invariant_equation = [invariant_equation]

    for i in range(len(invariant_equation)):
        if not prove_invariant2(network, [rnn_start_idxs[i]],
                                [invariant_equation[i]]):
            print("Fail on invariant: ", i)
            return False
        else:
            # Add the invariant hypothesis for the next proving
            network.addEquation(invariant_equation[i])

    return True
Пример #16
0
    def prove_adv_property(self, img_patch, out_idx_max, out_idx_compare, n):
        '''
        Prove an adversarial property on the rnn
        :param img_patch: the input image for the network; a constant patch is used when None
        :param out_idx_max: which index in the output should be maximum
        :param out_idx_compare: marabou variable index to compare the max output against
        :param n: number of iterations
        :return: result of prove_multidim_property
        '''
        if img_patch is None:
            img_patch = np.array([0.2] * 112)
        img_patch = img_patch[:MAX_SIZE]

        properties = []
        self.set_network_description(img_patch, n)
        assert len(self.out_idx) > out_idx_max

        # out[out_idx_max] - out_idx_compare >= small
        property_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
        property_eq.addAddend(1, self.out_idx[out_idx_max])
        property_eq.addAddend(-1, out_idx_compare)
        property_eq.setScalar(small)
        properties.append(property_eq)

        # Each cell's iterator variable sits 3 indices before its output
        rnn_start_idxs = [out - 3 for out in self.rnn_output_idxs]
        return prove_multidim_property(self.network, rnn_start_idxs,
                                       self.rnn_output_idxs,
                                       self.rnn_initial_values, properties)
Пример #17
0
def add_output_equations(network, rnn_output_idxs, output_weight, output_bias):
    '''
    Build the equations for the output layer (dense + ReLU)
    :param network: network to append equations and variables to
    :param rnn_output_idxs: the output indices of the previous layer
    :param output_weight: Weights to multiply the previous layer
    :param output_bias: The bias of each equation
    :return: list of indices of output classes (the f variables)
    '''
    assert len(rnn_output_idxs) == output_weight.shape[1]
    assert output_weight.shape[0] == len(output_bias)

    first_new_idx = network.getNumberOfVariables()
    n_outputs = output_weight.shape[0]
    network.setNumberOfVariables(first_new_idx + n_outputs * 2)  # *2 because of the relu

    output_idxs = []
    for out in range(n_outputs):
        b_idx = first_new_idx + 2 * out
        f_idx = b_idx + 1
        output_idxs.append(f_idx)

        # b unbounded; f = ReLu(b) so non-negative
        network.setLowerBound(b_idx, -large)
        network.setUpperBound(b_idx, large)
        network.setLowerBound(f_idx, 0)
        network.setUpperBound(f_idx, large)
        MarabouCore.addReluConstraint(network, b_idx, f_idx)

        # sum_j w[out, j] * prev_j - b = -bias[out]
        out_eq = MarabouCore.Equation()
        for j in range(output_weight.shape[1]):
            out_eq.addAddend(output_weight[out, j], rnn_output_idxs[j])
        out_eq.addAddend(-1, b_idx)
        out_eq.setScalar(-output_bias[out])
        network.addEquation(out_eq)

    return output_idxs
Пример #18
0
def alpha_to_equation(start_idx, output_idx, initial_val, new_alpha, inv_type):
    '''
    Create an invariant equation according to the simple template \alpha*i \le R_i OR \alpha*i \ge R_i
    :param start_idx: index of the rnn iterator (i)
    :param output_idx: index of R_i
    :param initial_val: If inv_type = GE the max value of R_1 if inv_type = LE the min of R_1
    :param new_alpha: alpha to use
    :param inv_type: Marabou.Equation.GE / Marabou.Equation.LE
    :return: marabou equation
    '''
    # Need the invariant from both sides because they are all dependent on each other
    eq = MarabouCore.Equation(inv_type)
    eq.addAddend(1, output_idx)  # R_i
    # The sign of the alpha addend flips with the inequality direction
    # TODO: I don't like this either
    alpha_sign = -1 if inv_type == MarabouCore.Equation.LE else 1
    eq.addAddend(new_alpha * alpha_sign, start_idx)  # i
    # TODO: Why isn't it alpha_sign * initial_val? if it's LE we want:
    # not ( alpha * i + beta \le R ) \iff -alpha * i - beta > R
    eq.setScalar(initial_val)
    return eq
Пример #19
0
def define_negative_sum_invariant_equations(query):
    '''
    Define the equations for the invariant proof; adds one slack variable
    (the iterator i) to the query.
    :param query: marabou definition of the positive_sum network, will be changed if needed
    :return: tuple ([base equations], [step equations], [equations that hold if invariant hold])
    '''
    start_param = query.getNumberOfVariables()
    query.setNumberOfVariables(start_param + 1)

    # Add the slack variable, i (non-negative iterator)
    query.setLowerBound(start_param, 0)
    query.setUpperBound(start_param, large)

    # Base case hypothesis: (s_0 f) = 0
    base_hidden_limit_eq = MarabouCore.Equation()
    base_hidden_limit_eq.addAddend(1, 1)
    base_hidden_limit_eq.setScalar(0)

    # Step hypothesis: (s_i-1 f) <= i - 1 <--> i - (s_i-1 f) >= 1
    hidden_limit_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    hidden_limit_eq.addAddend(1, start_param)  # i
    hidden_limit_eq.addAddend(-1, 1)  # s_i-1 f
    hidden_limit_eq.setScalar(1)
    # query.addEquation(hidden_limit_eq)

    # negate the invariant we want to prove
    # not(s_1 b <= 1) <--> s_1 b  > 1  <--> s_1 b >= 1 + \epsilon
    base_output_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    base_output_equation.addAddend(1, 2)
    base_output_equation.setScalar(1 + small)

    # NOTE(review): the code encodes s_i b - i >= \epsilon, i.e. the negation
    # of the invariant s_i b <= i below. The original comment here read
    # "not (s_i b >= i) <--> s_i b < i <--> s_i b - i >= \epsilon", which is
    # self-contradictory; confirm the intended direction.
    output_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    output_equation.addAddend(1, 2)  # s_i b
    output_equation.addAddend(-1, start_param)  # i
    output_equation.setScalar(small)

    # The invariant itself: s_i b <= i
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(1, 2)  # s_i b
    invariant_equation.addAddend(-1, start_param)  # i
    invariant_equation.setScalar(0)

    base_invariant_eq = [base_hidden_limit_eq, base_output_equation]
    step_invariant_eq = [hidden_limit_eq, output_equation]
    return (base_invariant_eq, step_invariant_eq, [invariant_equation])
Пример #20
0
 def get_equation(self, rnn_model: RnnMarabouModel) -> MarabouCore.Equation:
     '''
     Build the Marabou equation this object describes.
     :param rnn_model: model supplying the concrete input/output variable indices
     :return: MarabouCore.Equation over input variables (if self.on_input) or output variables
     '''
     eq = MarabouCore.Equation(self.eq_type)
     for (v, c) in self.vars_coefficients:
         # Map the symbolic variable v to the concrete marabou index
         if self.on_input:
             eq.addAddend(c, rnn_model.input_idx[v])
         else:
             eq.addAddend(c, rnn_model.output_idx[v])
     # BUGFIX: setScalar used to be called inside the loop, so it never ran
     # when vars_coefficients was empty and ran redundantly otherwise.
     eq.setScalar(self.scalar)
     return eq
Пример #21
0
def add_rnn_cell(query,
                 input_weights,
                 hidden_weight,
                 num_iterations,
                 bias=0,
                 print_debug=False):
    '''
    Create rnn cell --> add 4 parameters to the query and the equations that describe the cell
    The added parameters are (same order): i, s_i-1 f, s_i b, s_i f
    :param query: the network so far (will add to this)
    :param input_weights: list of tuples, each tuple (variable_idx, weight)
    :param hidden_weight: the weight inside the cell
    :param num_iterations: Number of iterations the cell runs
    :param bias: constant added to the cell pre-activation (enters the equation as -bias on the scalar side)
    :param print_debug: currently unused -- the dump() call guarded by it is commented out below
    :return: the index of the last parameter (which is the output of the cell)
    '''

    last_idx = query.getNumberOfVariables()
    query.setNumberOfVariables(last_idx + 4)  # i, s_i-1 f, s_i b, s_i f

    # i -- the iterator variable, bounded by the number of iterations
    # TODO: when doing this we make the number of iterations to be n_iterations + 1
    query.setLowerBound(last_idx, 0)
    query.setUpperBound(last_idx, num_iterations)

    # s_i-1 f -- previous forward value, non-negative (it is a ReLU output)
    query.setLowerBound(last_idx + 1, 0)
    query.setUpperBound(last_idx + 1, large)

    # s_i b -- pre-activation, unbounded
    query.setLowerBound(last_idx + 2, -large)
    query.setUpperBound(last_idx + 2, large)

    # s_i f -- post-activation, non-negative
    query.setLowerBound(last_idx + 3, 0)
    query.setUpperBound(last_idx + 3, large)

    # s_i f = ReLu(s_i b)
    MarabouCore.addReluConstraint(query, last_idx + 2, last_idx + 3)

    # s_i-1 f >= i * \sum (x_j_min * w_j)
    # prev_min_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    # prev_min_eq.addAddend(1, last_idx + 1)
    # prev_min_eq.addAddend(1, last_idx + 1)

    # s_i b = x_j * w_j for all j connected + s_i-1 f * hidden_weight
    # (rearranged: sum_j w_j*x_j + hidden_weight*s_i-1 f - s_i b = -bias)
    update_eq = MarabouCore.Equation()
    for var_idx, weight in input_weights:
        update_eq.addAddend(weight, var_idx)
    update_eq.addAddend(hidden_weight, last_idx + 1)
    update_eq.addAddend(-1, last_idx + 2)
    update_eq.setScalar(-bias)
    # if print_debug:
    #     update_eq.dump()
    query.addEquation(update_eq)

    return last_idx + 3
Пример #22
0
    def getMarabouQuery(self):
        """
        Function to convert network into Marabou Query
        Returns:
            ipq: (MarabouCore.InputQuery) representing query
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)
        print("num vars = ", self.numVars)
        i = 0
        # TODO: this is necessary, so IF should be added (if user define -> use the userdefined, else use regular inputs)
        if len(self.userDefineInputVars) > 0:
            # The user explicitly chose which variables are query inputs
            for inputVar in self.userDefineInputVars:
                ipq.markInputVariable(inputVar, i)
                i += 1
                print("userDefineInputVar", inputVar)
        else:
            # NOTE(review): in this branch the variables are only printed;
            # markInputVariable is commented out, so no input is marked.
            for inputVarArray in self.inputVars:
                for inputVar in inputVarArray.flatten():
                    # ipq.markInputVariable(inputVar, i)
                    i += 1
                    print("inputVar", inputVar)

        # Mark every output variable, in flattened order
        i = 0
        for outputVar in self.outputVars.flatten():
            ipq.markOutputVariable(outputVar, i)
            i += 1
            print("outputVar", outputVar)

        # Copy the linear equations into the query
        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        # Piecewise-linear constraints: ReLU pairs and max constraints
        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        # Variable bounds
        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        return ipq
Пример #23
0
    def getMarabouQuery(self):
        """Function to convert network into Marabou InputQuery

        Returns:
            :class:`~maraboupy.MarabouCore.InputQuery`
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)

        # Mark every input variable, in flattened order across all input arrays
        i = 0
        for inputVarArray in self.inputVars:
            for inputVar in inputVarArray.flatten():
                ipq.markInputVariable(inputVar, i)
                i += 1

        # Mark every output variable, in flattened order
        i = 0
        for outputVar in self.outputVars.flatten():
            ipq.markOutputVariable(outputVar, i)
            i += 1

        # Copy the linear equations into the query
        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        # Piecewise-linear constraints
        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        for b, f in self.absList:
            MarabouCore.addAbsConstraint(ipq, b, f)

        for b, f in self.signList:
            MarabouCore.addSignConstraint(ipq, b, f)

        for disjunction in self.disjunctionList:
            MarabouCore.addDisjunctionConstraint(ipq, disjunction)

        # Variable bounds
        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        return ipq
Пример #24
0
def define_last_network(xlim, ylim, n_iterations):
    '''
    Build the "last_network": an RNN whose output is simply its most recent
    input. The recurrence is s_i = 0 * s_{i-1} + 1 * x, so the hidden state
    always equals the last input fed in.
    :param xlim: (lower, upper) bounds on the network input
    :param ylim: (lower, upper) bounds on the network output
    :param n_iterations: number of inputs / times the rnn cell is unrolled
    :return: (network, [rnn output indices], invariant equation, output equation
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)

    # Variable 0: the input x.
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    # The rnn cell: s_i = 0 * s_{i-1} + 1 * x.
    rnn_idx = add_rnn_cell(query, [(0, 1)], 0, n_iterations)
    y_idx = rnn_idx + 1

    # Output variable y, effectively unbounded.
    query.setNumberOfVariables(y_idx + 1)
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # Tie the output to the rnn state: y - (s_i f) = 0.
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # Invariant to prove: (s_i-1 f) <= xlim[1].
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(1, rnn_idx - 2)  # s_i-1 f
    invariant_equation.setScalar(xlim[1])

    # Property to verify: y <= ylim[1].
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return query, [rnn_idx], invariant_equation, [property_eq]
Пример #25
0
        def create_initial_run_equations(loop_indices, rnn_prev_iteration_idx):
            '''
            Build equations pinning the loop counters and the rnn hidden
            values from the previous iteration to zero (the state before
            the very first iteration).
            :param loop_indices: marabou variable indices of the loop counters
            :param rnn_prev_iteration_idx: marabou variable indices of the
                previous-iteration hidden values (s_i-1 f)
            :return: list of equations to add to marabou
            '''
            def _pin_to_zero(var_idx):
                # Encode: var == 0.
                eq = MarabouCore.Equation()
                eq.addAddend(1, var_idx)
                eq.setScalar(0)
                return eq

            # Loop counters first, then the hidden values, matching the
            # order callers expect.
            equations = [_pin_to_zero(i) for i in loop_indices]
            equations += [_pin_to_zero(idx) for idx in rnn_prev_iteration_idx]
            return equations
Пример #26
0
    def getMarabouQuery(self):
        """
        Function to convert network into Marabou Query
        Returns:
            ipq: (MarabouCore.InputQuery) representing query
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)

        # Mark every input variable with its flattened position.
        i = 0
        for inputVarArray in self.inputVars:
            for inputVar in inputVarArray.flatten():
                ipq.markInputVariable(inputVar, i)
                i += 1

        # Mark every output variable with its flattened position.
        i = 0
        for outputVar in self.outputVars.flatten():
            ipq.markOutputVariable(outputVar, i)
            i += 1

        # Copy each linear equation: addends first, then the scalar.
        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        # ReLU constraints: r = (b, f) with f = relu(b).
        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        # Max constraints: m = (elements, maxVar).
        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        # Variable bounds (lowerBounds / upperBounds map var -> bound).
        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        # FIX: the original re-marked self.inputVars[0] / self.outputVars[0]
        # here with the arguments swapped (ipq.markInputVariable(i, var)),
        # contradicting the correct (variable, index) order used above.
        # All variables are already marked correctly, so the redundant,
        # mis-ordered loops were removed.

        return ipq
Пример #27
0
 def add_marabou_eq(self, coeffs, variables, eq_type, scalar):
     """Add the linear constraint sum(coeffs[i] * variables[i]) <eq_type> scalar
     to the underlying Marabou input query.

     Strict inequalities and not-equal constraints are not supported.
     """
     if eq_type in (ConstraintType('LESS'), ConstraintType('GREATER')):
         # TODO: apply epsilon conversion by adding a slack variable = epsilon
         # to convert from a strict inequality to a non-strict one
         raise NotImplementedError
     if eq_type == ConstraintType('NOT_EQUAL'):
         raise NotImplementedError
     assert (len(coeffs) == len(variables))
     eq = MarabouCore.Equation(self.eq_type_map[eq_type])
     for coeff, var in zip(coeffs, variables):
         eq.addAddend(coeff, var)
     eq.setScalar(scalar)
     self.ipq.addEquation(eq)
Пример #28
0
def define_sum_invariant_max_equations(query):
    '''
    Define the equations for invariant, if needs more params should update the query with them
    and we need to define it in the calling function (not the best way but some
    :param query: marabou definition of the sum network, will be changed if needed
    :return: tuple ([base equations], [step equations], [equations that hold if invariant hold])
    '''
    def _make_eq(addends, scalar, eq_type=None):
        # Build a MarabouCore.Equation from (coefficient, variable) pairs.
        eq = MarabouCore.Equation() if eq_type is None else MarabouCore.Equation(eq_type)
        for coefficient, variable in addends:
            eq.addAddend(coefficient, variable)
        eq.setScalar(scalar)
        return eq

    xlim = (query.getLowerBound(0), query.getUpperBound(0))

    # Append the slack variable i (the loop counter) to the query.
    i_idx = query.getNumberOfVariables()
    query.setNumberOfVariables(i_idx + 1)
    query.setLowerBound(i_idx, 0)
    query.setUpperBound(i_idx, large)

    # Base case hidden states: (s_0 f) = 0 and (z_0 f) = 0.
    base_hidden_limit_eq_s = _make_eq([(1, 1)], 0)
    base_hidden_limit_eq_z = _make_eq([(1, 4)], 0)

    # Negation of the invariant at the first step:
    # not(s_1 b + z_1 b <= xlim[1]) <--> s_1 b + z_1 b >= xlim[1] + \epsilon
    base_output_equation = _make_eq([(1, 2), (1, 5)], xlim[1] + small,
                                    MarabouCore.Equation.GE)
    # TODO: Add also GE from 1 + small and somehow validate also that

    # Step hypothesis:
    # (s_i-1 f) + (z_i-1 f) <= xlim[1] * (i - 1)
    #   <--> (s_i-1 f) + (z_i-1 f) - i * xlim[1] <= -xlim[1]
    hidden_limit_eq = _make_eq([(-xlim[1], i_idx), (1, 1), (1, 4)], -xlim[1],
                               MarabouCore.Equation.LE)

    # Negation of the invariant at step i:
    # not(s_i b + z_i b <= xlim[1] * i) <--> s_i b + z_i b - xlim[1] * i >= \epsilon
    output_equation = _make_eq([(1, 2), (1, 5), (-xlim[1], i_idx)], -small,
                               MarabouCore.Equation.GE)
    # TODO: Add also GE from 1 + small and somehow validate also that

    # The invariant itself: s_i b + z_i b <= xlim[1] * i
    invariant_equation = _make_eq([(1, 2), (1, 5), (-xlim[1], i_idx)], 0,
                                  MarabouCore.Equation.LE)

    base_invariant_eq = [base_hidden_limit_eq_s, base_hidden_limit_eq_z, base_output_equation]
    step_invariant_eq = [hidden_limit_eq, output_equation]
    return (base_invariant_eq, step_invariant_eq, [invariant_equation])
Пример #29
0
def define_last_invariant_equations(query):
    '''
    Define the equations for invariant, if needs more params should update the query with them
    and we need to define it in the calling function (not the best way but some
    :param query: marabou definition of the positive_sum network, will be changed if needed
    :return: tuple ([base equations], [step equations], [equations that hold if invariant hold])
    '''
    xlim = (query.getLowerBound(0), query.getUpperBound(0))

    # Base case hidden state: (s_0 f) = 0.
    hidden_base_eq = MarabouCore.Equation()
    hidden_base_eq.addAddend(1, 1)
    hidden_base_eq.setScalar(0)

    # Step hypothesis, intended as (s_i-1 f) <= xlim[1].
    # NOTE(review): as encoded this reads -1 * (s_i-1 f) >= xlim[1], which
    # is not equivalent to the intent stated above — confirm the sign of
    # the coefficient before relying on this equation.
    hidden_step_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    hidden_step_eq.addAddend(-1, 1)  # s_i-1 f
    hidden_step_eq.setScalar(xlim[1])

    # Negation of the invariant at the first step:
    # not(s_1 b <= xlim[1]) <--> s_1 b >= xlim[1] + \epsilon
    negated_invariant_base = MarabouCore.Equation(MarabouCore.Equation.GE)
    negated_invariant_base.addAddend(1, 2)
    negated_invariant_base.setScalar(xlim[1] + small)

    # Negation of the invariant at step i:
    # not(s_i b <= xlim[1]) <--> s_i b >= xlim[1] + \epsilon
    negated_invariant_step = MarabouCore.Equation(MarabouCore.Equation.GE)
    negated_invariant_step.addAddend(1, 2)  # s_i b
    negated_invariant_step.setScalar(xlim[1] + small)

    # The invariant itself: s_i b <= xlim[1]
    invariant_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_eq.addAddend(1, 2)  # s_i b
    invariant_eq.setScalar(xlim[1])

    base_invariant_eq = [hidden_base_eq, negated_invariant_base]
    step_invariant_eq = [hidden_step_eq, negated_invariant_step]
    return (base_invariant_eq, step_invariant_eq, [invariant_eq])
Пример #30
0
def define_ipq(property_bound):
    """
    Build a small input query directly through MarabouCore: a single
    ReLU y = relu(x) plus the property x + y <= property_bound.
    Arguments:
        property_bound: (float) value of upper bound for x + y
    Returns:
        ipq (MarabouCore.InputQuery) input query object representing network and constraints
    """
    ipq = MarabouCore.InputQuery()
    ipq.setNumberOfVariables(3)

    # Variable 0: x in [-1, 1].
    ipq.setLowerBound(0, -1)
    ipq.setUpperBound(0, 1)

    # Variable 1: relu(x) in [0, LARGE].
    ipq.setLowerBound(1, 0)
    ipq.setUpperBound(1, LARGE)

    # Variable 2: y, lower bound only.
    # If an upper/lower bound is not supplied to Marabou, Marabou uses
    # float min/max.
    ipq.setLowerBound(2, -LARGE)

    MarabouCore.addReluConstraint(ipq, 0, 1)

    # Tie the output to the relu: y - relu(x) = 0.
    tie_eq = MarabouCore.Equation()
    tie_eq.addAddend(1, 2)
    tie_eq.addAddend(-1, 1)
    tie_eq.setScalar(0)
    ipq.addEquation(tie_eq)

    # Property under test: x + y <= property_bound.
    bound_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    bound_eq.addAddend(1, 0)
    bound_eq.addAddend(1, 2)
    bound_eq.setScalar(property_bound)
    ipq.addEquation(bound_eq)
    return ipq