Example #1
File: tests.py Project: zeta1999/Marabou
def define_network():
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(3)

    # x
    network.setLowerBound(0, -1)
    network.setUpperBound(0, 1)

    network.setLowerBound(1, 1)
    network.setUpperBound(1, 2)

    # y
    network.setLowerBound(2, -large)
    network.setUpperBound(2, large)

    MarabouCore.addReluConstraint(network, 0, 1)

    # y - relu(x) = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, 2)
    output_equation.addAddend(-1, 1)
    output_equation.setScalar(0)
    # output_equation.dump()
    network.addEquation(output_equation)

    # relu(x) <= 3 (this property equation is built but never added to the query)
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, 1)
    property_eq.setScalar(3)

    return network
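Note that property_eq above is built but never added to the query; a caller would add it (usually negated, as with negate_equation in Example #8) and solve. A minimal sketch, not from the original file, assuming `from maraboupy import MarabouCore` and the module-level `large` (Example #26 defines large = 10.0):

network = define_network()

# re-create the property, since define_network does not add or return it
property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
property_eq.addAddend(1, 1)
property_eq.setScalar(3)
network.addEquation(property_eq)

vars1, stats1 = MarabouCore.solve(network, "", 0, 2)  # arguments as in Example #2
print("SAT" if len(vars1) > 0 else "UNSAT")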
Example #2
def boundEqConflict():
    '''
    Simple precision example.
    Only two nodes connected by a ReLU, and an equation that asks whether the ReLU output can be a very small negative number
    :return:
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(2)

    network.setLowerBound(0, -5)
    network.setUpperBound(0, 5)

    network.setLowerBound(1, 0)
    network.setUpperBound(1, 5)

    MarabouCore.addReluConstraint(network, 0, 1)

    eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    eq.addAddend(1, 1)
    eq.setScalar(-10**-4)  # -10 ** -4 works
    network.addEquation(eq)

    verbose = 2
    vars1, stats1 = MarabouCore.solve(network, "", 0, verbose)
    if len(vars1) > 0:
        print("SAT")
        print(vars1)
        return False
    else:
        print("UNSAT")
        return True
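The inline comment about `-10 ** -4` points at floating-point precision: mathematically the query is always UNSAT (variable 1 is a ReLU output with lower bound 0), so any SAT answer for a small negative scalar is a precision artifact. A sketch, under the same import assumptions, that sweeps the scalar:

def bound_eq_conflict_eps(eps):
    # same query as boundEqConflict, with a configurable scalar
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(2)
    network.setLowerBound(0, -5)
    network.setUpperBound(0, 5)
    network.setLowerBound(1, 0)
    network.setUpperBound(1, 5)
    MarabouCore.addReluConstraint(network, 0, 1)
    eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    eq.addAddend(1, 1)
    eq.setScalar(eps)
    network.addEquation(eq)
    vars1, _ = MarabouCore.solve(network, "", 0, 0)
    return len(vars1) > 0  # True iff SAT

for exp in range(2, 8):
    print(-10 ** -exp, "SAT" if bound_eq_conflict_eps(-10 ** -exp) else "UNSAT")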
Example #3
    def __init__(self, h5_file_path, n_iterations=10):
        self.network = MarabouCore.InputQuery()
        self.model = tf.keras.models.load_model(h5_file_path)
        # TODO: this won't work if the input is 2D
        n_input_nodes = self.model.input_shape[-1]
        prev_layer_idx = list(range(0, n_input_nodes))
        self.input_idx = prev_layer_idx
        self.n_iterations = n_iterations
        self.rnn_out_idx = []

        # Each cell in the list is a triple (in_w, hidden_w, bias), the cells are sorted by layer from input to output
        self.rnn_weights = []

        # save spot for the input nodes
        self.network.setNumberOfVariables(n_input_nodes)
        for layer in self.model.layers:
            if isinstance(layer, tf.keras.layers.SimpleRNN):
                prev_layer_idx = self.add_rnn_simple_layer(layer, prev_layer_idx)
                self.rnn_out_idx.append(prev_layer_idx)
            elif type(layer) == tf.keras.layers.Dense:
                prev_layer_idx = self.add_dense_layer(layer, prev_layer_idx)
            else:
                raise NotImplementedError("{} layer is not supported".format(type(layer)))

        # Save the last layer's output indices
        self.output_idx = list(range(*prev_layer_idx))
        self._rnn_loop_idx = []
        self._rnn_prev_iteration_idx = []
        for layer_out_idx in self.rnn_out_idx:
            self._rnn_loop_idx.append([i - 3 for i in layer_out_idx])
            self._rnn_prev_iteration_idx.append([i - 2 for i in layer_out_idx])

        self.num_rnn_layers = len(self.rnn_out_idx)
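The `- 3` and `- 2` offsets rely on the variable layout that `add_rnn_cell` (used throughout these examples) allocates per RNN cell: the loop counter i, the previous hidden state s_i-1 f, the pre-activation s_i b, and the cell output s_i f, in consecutive indices. A small illustration of that convention (the names are descriptive only):

def rnn_cell_layout(out_idx):
    # layout implied by the offsets above: out_idx - 3 is the loop counter,
    # out_idx - 2 the previous hidden state, out_idx - 1 the pre-activation
    return {
        'i (loop counter)': out_idx - 3,
        's_i-1 f (previous state)': out_idx - 2,
        's_i b (pre-activation)': out_idx - 1,
        's_i f (cell output)': out_idx,
    }

print(rnn_cell_layout(4))  # a cell whose output variable is 4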
Example #4
    def getMarabouQuery(self):
        """Function to convert network into Marabou InputQuery

        Returns:
            :class:`~maraboupy.MarabouCore.InputQuery`
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)

        i = 0
        for inputVarArray in self.inputVars:
            for inputVar in inputVarArray.flatten():
                ipq.markInputVariable(inputVar, i)
                i += 1

        i = 0
        for outputVar in self.outputVars.flatten():
            ipq.markOutputVariable(outputVar, i)
            i += 1

        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        for b, f in self.absList:
            MarabouCore.addAbsConstraint(ipq, b, f)

        for b, f in self.signList:
            MarabouCore.addSignConstraint(ipq, b, f)

        for disjunction in self.disjunctionList:
            MarabouCore.addDisjunctionConstraint(ipq, disjunction)

        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        return ipq
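`getMarabouQuery` is a method of maraboupy's `MarabouNetwork` objects; one way to obtain such an object is through a maraboupy reader. A sketch, assuming the `Marabou.read_nnet` reader and a placeholder model path:

from maraboupy import Marabou

net = Marabou.read_nnet("path/to/network.nnet")  # placeholder path
ipq = net.getMarabouQuery()
print(ipq.getNumberOfVariables())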
Example #5
def define_positive_sum_network(xlim=(-1, 1)):
    '''
    Defines the positive_sum network in a marabou way, without the recurrent part
    i.e. we define:
        s_i b = s_i-1 f + x_i
        y = s_i f
    :param xlim: how to limit the input to the network
    :return: query to marabou that defines the positive_sum rnn network (without the recurrent part)
    '''
    num_params_for_cell = 5

    # For the invariant proof we would add one extra slack variable (see the commented-out extra_params below)
    positive_sum_rnn_query = MarabouCore.InputQuery()
    positive_sum_rnn_query.setNumberOfVariables(num_params_for_cell)  # + extra_params)

    # x
    positive_sum_rnn_query.setLowerBound(0, xlim[0])
    positive_sum_rnn_query.setUpperBound(0, xlim[1])

    # s_i-1 f (or temp in some of my notes)
    positive_sum_rnn_query.setLowerBound(1, 0)
    positive_sum_rnn_query.setUpperBound(1, large)

    # s_i b
    positive_sum_rnn_query.setLowerBound(2, -large)
    positive_sum_rnn_query.setUpperBound(2, large)

    # s_i f
    positive_sum_rnn_query.setLowerBound(3, 0)
    positive_sum_rnn_query.setUpperBound(3, large)

    # y
    positive_sum_rnn_query.setLowerBound(4, -large)
    positive_sum_rnn_query.setUpperBound(4, large)

    # s_i b = x_i * 1 + s_i-1 f * 1
    update_eq = MarabouCore.Equation()
    update_eq.addAddend(1, 0)
    update_eq.addAddend(1, 1)
    update_eq.addAddend(-1, 2)
    update_eq.setScalar(0)
    # update_eq.dump()
    positive_sum_rnn_query.addEquation(update_eq)

    # s_i f = ReLu(s_i b)
    MarabouCore.addReluConstraint(positive_sum_rnn_query, 2, 3)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, 4)
    output_equation.addAddend(-1, 3)
    output_equation.setScalar(0)
    # output_equation.dump()
    positive_sum_rnn_query.addEquation(output_equation)

    return positive_sum_rnn_query
Example #6
    def getMarabouQuery(self):
        """
        Function to convert network into Marabou Query
        Returns:
            ipq: (MarabouCore.InputQuery) representing query
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)
        print("num vars = ", self.numVars)
        i = 0
        # TODO: an IF is needed here: if the user defined input variables, use those; otherwise fall back to the regular inputs
        if len(self.userDefineInputVars) > 0:
            for inputVar in self.userDefineInputVars:
                ipq.markInputVariable(inputVar, i)
                i += 1
                print("userDefineInputVar", inputVar)
        else:
            for inputVarArray in self.inputVars:
                for inputVar in inputVarArray.flatten():
                    # ipq.markInputVariable(inputVar, i)
                    i += 1
                    print("inputVar", inputVar)

        i = 0
        for outputVar in self.outputVars.flatten():
            ipq.markOutputVariable(outputVar, i)
            i += 1
            print("outputVar", outputVar)

        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        return ipq
Example #7
 def clear(self):
     # clear marabou query
     self.ipq = MarabouCore.InputQuery()
     self.ipq.setNumberOfVariables(0)
     self.variable_map = {}  # maps string names -> integers
     self.input_vars = []
     self.output_vars = []
     self.constraints = [
     ]  # log of things that have been asserted, for debug/double check
     self.num_relu = 0
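`variable_map` maps string names to solver variable indices. A plausible companion method (hypothetical, not part of the original class) that allocates a fresh query variable the first time a name is seen:

 def get_variable(self, name):
     # hypothetical helper: allocate a new query variable on first use,
     # growing the query with setNumberOfVariables as in the examples above
     if name not in self.variable_map:
         idx = self.ipq.getNumberOfVariables()
         self.ipq.setNumberOfVariables(idx + 1)
         self.variable_map[name] = idx
     return self.variable_map[name]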
Example #8
def define_positive_sum_network_no_invariant(xlim, ylim, n_iterations):
    '''
    Defines the positive_sum network in a marabou way
        s_i = ReLu(1 * x_i + 1 * s_i-1)
        y = s_k (where k == n_iterations)
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the positive_sum rnn network (without the recurrent part)
    '''
    positive_sum_rnn_query = MarabouCore.InputQuery()
    positive_sum_rnn_query.setNumberOfVariables(1)  # x

    # x
    positive_sum_rnn_query.setLowerBound(0, xlim[0])
    positive_sum_rnn_query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # i
    rnn_idx = add_rnn_cell(positive_sum_rnn_query, [(0, 1)],
                           1,
                           n_iterations,
                           print_debug=1)  # rnn_idx == s_i f
    s_i_1_f_idx = rnn_idx - 2
    y_idx = rnn_idx + 1

    def relu(x):
        return max(x, 0)

    positive_sum_rnn_query.setNumberOfVariables(y_idx + 1)

    # y
    positive_sum_rnn_query.setLowerBound(y_idx, -large)
    positive_sum_rnn_query.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    positive_sum_rnn_query.addEquation(output_equation)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    min_y = relu(relu(xlim[0] * 1) * 1)
    max_y = relu(relu(xlim[1] * 1) * 1)

    initial_values = [[min_y], [max_y]]

    return positive_sum_rnn_query, [rnn_start_idx], None, \
        [negate_equation(property_eq)], initial_values
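`negate_equation` is used here but not defined in the snippet. Since accessor methods on `MarabouCore.Equation` are not shown in these examples, the sketch below takes the addends and scalar explicitly; it captures the idea for the LE case (the GE case is symmetric):

def negate_le(addends, scalar):
    # the negation of  sum(c_i * x_i) <= scalar  is modeled here as
    # sum(c_i * x_i) >= scalar; Marabou works with closed half-spaces,
    # so the shared boundary is kept on both sides
    eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    for c, v in addends:
        eq.addAddend(c, v)
    eq.setScalar(scalar)
    return eq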
Example #9
    def getMarabouQuery(self):
        """
        Function to convert network into Marabou Query
        Returns:
            ipq: (MarabouCore.InputQuery) representing query
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)

        i = 0
        for inputVarArray in self.inputVars:
            for inputVar in inputVarArray.flatten():
                ipq.markInputVariable(inputVar, i)
                i += 1

        i = 0
        for outputVar in self.outputVars.flatten():
            ipq.markOutputVariable(outputVar, i)
            i += 1

        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        # re-mark the variables of the first input/output arrays
        for i, var in enumerate(self.inputVars[0]):
            ipq.markInputVariable(var, i)
        for i, var in enumerate(self.outputVars[0]):
            ipq.markOutputVariable(var, i)

        return ipq
Example #10
    def set_network_description(self, img_patch, n):
        if not self.initial_values:
            self._calc_output_initial_values(img_patch, n)
        self.network = MarabouCore.InputQuery()
        self.network.setNumberOfVariables(0)

        set_img_bounds(img_patch, self.network, self.perturbation_limit)
        self.rnn_output_idxs = add_rnn_cells(self.network, self.w_in, self.w_h,
                                             self.b_h, n)

        self.out_idx = add_output_equations(self.network, self.rnn_output_idxs,
                                            self.w_out, self.b_out)
        print("output idxs are", self.out_idx)
Example #11
def define_zero_network(xlim, ylim, n_iterations):
    '''
    Defines the zero network in a marabou way
    The zero network is a network with two rnn cells, that always outputs zero
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network; will affect how we create the invariant
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the zero rnn network (without the recurrent part)
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(1)  # x

    # x
    network.setLowerBound(0, xlim[0])
    network.setUpperBound(0, xlim[1])

    s_cell_iterator = 1  # i
    s_i_f_idx = add_rnn_cell(network, [(0, 1)], 1, n_iterations)
    z_cell_iterator = network.getNumberOfVariables()
    z_i_f_idx = add_rnn_cell(network, [(0, 1)], 1, n_iterations)
    y_idx = z_i_f_idx + 1

    network.setNumberOfVariables(y_idx + 1)

    # y
    network.setLowerBound(y_idx, -large)
    network.setUpperBound(y_idx, large)

    # y = s_k f - z_k f <--> y - s_k f + z_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, s_i_f_idx)
    output_equation.addAddend(1, z_i_f_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    network.addEquation(output_equation)

    # s_i f - z_i f <= SMALL
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(-1, z_i_f_idx)  # z_i f
    invariant_equation.addAddend(1, s_i_f_idx)  # s_i f
    invariant_equation.setScalar(SMALL)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim)

    return network, [s_cell_iterator,
                     z_cell_iterator], invariant_equation, [property_eq]
Example #12
def test_statistics():
    """
    Test that a query generated from Maraboupy can be saved and loaded correctly and return sat
    """
    ipq = MarabouCore.InputQuery()
    ipq.setNumberOfVariables(1)
    ipq.setLowerBound(0, -1)
    ipq.setUpperBound(0, 1)

    opt = createOptions(verbosity = 0) # Turn off printing
    exitCode, vals, stats = MarabouCore.solve(ipq, opt, "")
    assert(stats.getUnsignedAttribute(MarabouCore.StatisticsUnsignedAttribute.NUM_SPLITS) == 0)
    assert(stats.getLongAttribute(MarabouCore.StatisticsLongAttribute.NUM_MAIN_LOOP_ITERATIONS) == 2)
    assert(stats.getDoubleAttribute(MarabouCore.StatisticsDoubleAttribute.MAX_DEGRADATION) == 0)
Example #13
def define_negative_sum_network(xlim, ylim, n_iterations):
    '''
    Defines the negative_sum network in a marabou way
        s_i = ReLu(-1 * x_i + s_i-1)
        y = s_k (where k == n_iterations)
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the negative_sum rnn network (without the recurrent part)
    '''
    positive_sum_rnn_query = MarabouCore.InputQuery()
    positive_sum_rnn_query.setNumberOfVariables(1)  # x

    # x
    positive_sum_rnn_query.setLowerBound(0, xlim[0])
    positive_sum_rnn_query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # i
    rnn_idx = add_rnn_cell(positive_sum_rnn_query, [(0, -1)], 1,
                           n_iterations)  # rnn_idx == s_i f
    y_idx = rnn_idx + 1

    positive_sum_rnn_query.setNumberOfVariables(y_idx + 1)

    # y
    positive_sum_rnn_query.setLowerBound(y_idx, -large)
    positive_sum_rnn_query.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    positive_sum_rnn_query.addEquation(output_equation)

    # s_i f <= i + 1 <--> i - s_i f >= -1
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_equation.addAddend(1, rnn_start_idx)  # i
    invariant_equation.addAddend(-1, rnn_idx)  # s_i f
    invariant_equation.setScalar(-1)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return positive_sum_rnn_query, [rnn_start_idx], \
        invariant_equation, [property_eq]
Example #14
def define_two_sum_network(xlim, ylim, n_iterations):
    '''
    The network gets a series of numbers and outputs two neurons, one summing the positive numbers and the other
    the negative ones.
    The property we check is that the output y stays within ylim.
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the two_sum rnn network (without the recurrent part)
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(1)  # x

    # x
    network.setLowerBound(0, xlim[0])
    network.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # i
    rnn_idx = add_rnn_cell(network, [(0, 1)], 1,
                           n_iterations)  # rnn_idx == s_i f
    y_idx = rnn_idx + 1

    network.setNumberOfVariables(y_idx + 1)

    # y
    network.setLowerBound(y_idx, -large)
    network.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    network.addEquation(output_equation)

    # s_i f <= i <--> i - s_i f >= 0
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_equation.addAddend(1, rnn_start_idx)  # i
    invariant_equation.addAddend(-1, rnn_idx)  # s_i f
    invariant_equation.setScalar(0)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return network, [rnn_start_idx], invariant_equation, [property_eq]
Example #15
def build_query(img_example: list, ylim: list, model: MnistModel):
    '''
    :param img_example: the input image, one value per input variable
    :param ylim: list of tuples, each cell is for an output variable, and the tuple is (min_val, max_val)
    :param model: initialized model with get_rnn_weights and get_output_weights
    :return: query to marabou that encodes the network
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(0)
    last_idx = set_img_bounds(img_example, network)

    rnn_input_weights, rnn_hidden_weights, rnn_bias = model.get_rnn_weights()
    rnn_output_idx = add_rnn_cells(network, rnn_input_weights, rnn_bias,
                                   rnn_hidden_weights)
    output_weights, output_bias_weights = model.get_output_weights()
    add_output_equations(network, rnn_output_idx, output_weights,
                         output_bias_weights)
    return network
Example #16
def define_last_network(xlim, ylim, n_iterations):
    '''
    Function that defines "last_network", an RNN network that outputs the last input parameter
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: (network, [rnn output indices], invariant equation, [output equation])
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    # rnn cell: s_i = 0 * s_i-1 + 1 * x
    rnn_idx = add_rnn_cell(query, [(0, 1)], 0, n_iterations)
    y_idx = rnn_idx + 1

    query.setNumberOfVariables(y_idx + 1)
    # y
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    query.addEquation(output_equation)

    # s_i-1 f <= xlim[1]
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(1, rnn_idx - 2)  # s_i-1 f
    invariant_equation.setScalar(xlim[1])

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return query, [rnn_idx], invariant_equation, [property_eq]
Example #17
def define_ipq(property_bound):
    """
    This function defines a simple input query directly through MarabouCore
    Arguments:
        property_bound: (float) value of upper bound for x + y
    Returns:
        ipq (MarabouCore.InputQuery) input query object representing network and constraints
    """
    ipq = MarabouCore.InputQuery()
    ipq.setNumberOfVariables(3)

    # x
    ipq.setLowerBound(0, -1)
    ipq.setUpperBound(0, 1)

    # relu(x)
    ipq.setLowerBound(1, 0)
    ipq.setUpperBound(1, LARGE)

    # y
    ipq.setLowerBound(2, -LARGE)
    # if an upper/lower bound is not supplied to Marabou, Marabou uses float min/max

    MarabouCore.addReluConstraint(ipq, 0, 1)

    # y - relu(x) = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, 2)
    output_equation.addAddend(-1, 1)
    output_equation.setScalar(0)
    ipq.addEquation(output_equation)

    # x + y <= property_bound
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, 0)
    property_eq.addAddend(1, 2)
    property_eq.setScalar(property_bound)
    ipq.addEquation(property_eq)
    return ipq
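Example #12 above shows the newer `solve` signature that takes an options object; applying it to `define_ipq` (assuming `createOptions` is imported as in that test):

ipq = define_ipq(0.5)
opt = createOptions(verbosity=0)  # as in test_statistics above
exitCode, vals, stats = MarabouCore.solve(ipq, opt, "")
print(exitCode, vals)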
Example #18
def unfold_sum_rnn(n_iterations, xlim=(-1, 1), ylim=(-1, 1)):
    i = 0  # index for variable number
    inputQuery = MarabouCore.InputQuery()

    num_variables = n_iterations  # the x input
    s_first_index = num_variables
    num_variables += n_iterations * 2  # for each temporal state (2 because of the ReLu)
    y_index = num_variables
    num_variables += 1  # for y

    inputQuery.setNumberOfVariables(num_variables)

    for _ in range(n_iterations):
        inputQuery.setLowerBound(i, xlim[0])
        inputQuery.setUpperBound(i, xlim[1])
        i += 1

    add_rnn_cell_bounds(inputQuery, n_iterations, s_first_index, large)  # add s_i

    # output
    inputQuery.setLowerBound(y_index, ylim[0])
    inputQuery.setUpperBound(y_index, ylim[1])

    add_hidden_state_equations(inputQuery, s_first_index, 1, 1, n_iterations)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_index)
    output_equation.addAddend(-1, y_index - 1)
    output_equation.setScalar(0)
    inputQuery.addEquation(output_equation)

    vars1, stats1 = MarabouCore.solve(inputQuery, "", 0)
    if len(vars1) > 0:
        print("SAT")
        print(vars1)
    else:
        print("UNSAT")
Example #19
    def getMarabouQuery(self):
        """
        Function to convert network into Marabou Query
        Returns:
            ipq: (MarabouCore.InputQuery) representing query
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)

        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for (c, v) in e.addendList:
                assert v < self.numVars
                eq.addAddend(c, v)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        for r in self.reluList:
            assert r[1] < self.numVars and r[0] < self.numVars
            MarabouCore.addReluConstraint(ipq, r[0], r[1])

        for m in self.maxList:
            assert m[1] < self.numVars
            for e in m[0]:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, m[0], m[1])

        for l in self.lowerBounds:
            assert l < self.numVars
            ipq.setLowerBound(l, self.lowerBounds[l])

        for u in self.upperBounds:
            assert u < self.numVars
            ipq.setUpperBound(u, self.upperBounds[u])

        return ipq
Example #20
def define_positive_sum_linear_input_constraint(x_addition, x_time_multiply,
                                                ylim, n_iterations):
    '''
    Defines the positive_sum network in a marabou way
        s_i = ReLu(1 * x_i + 1 * s_i-1)
        y = s_k (where k == n_iterations)
    We bound x to be c*i + b (where i is the time step)
    :param x_addition: tuple with two entries (min, max), of the b value in x bounds
    :param x_time_multiply: tuple with two entries (min, max), of the c value in x bounds
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: query to marabou that defines the positive_sum rnn network (without the recurrent part)
    '''
    positive_sum_rnn_query = MarabouCore.InputQuery()
    positive_sum_rnn_query.setNumberOfVariables(1)  # x

    rnn_start_idx = 1  # i
    rnn_idx = add_rnn_cell(positive_sum_rnn_query, [(0, 1)],
                           1,
                           n_iterations,
                           print_debug=1)  # rnn_idx == s_i f
    s_i_1_f_idx = rnn_idx - 2
    y_idx = rnn_idx + 1

    # x bounds
    x_bound_min_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    x_bound_min_eq.addAddend(1, 0)  # x
    x_bound_min_eq.addAddend(-x_time_multiply[0], rnn_start_idx)  # c * i
    x_bound_min_eq.setScalar(x_addition[0])
    positive_sum_rnn_query.addEquation(x_bound_min_eq)

    x_bound_max_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    x_bound_max_eq.addAddend(1, 0)  # x
    x_bound_max_eq.addAddend(-x_time_multiply[1], rnn_start_idx)  # c * i
    x_bound_max_eq.setScalar(x_addition[1])
    positive_sum_rnn_query.addEquation(x_bound_max_eq)

    def relu(x):
        return max(x, 0)

    positive_sum_rnn_query.setNumberOfVariables(y_idx + 1)

    # y
    positive_sum_rnn_query.setLowerBound(y_idx, -large)
    positive_sum_rnn_query.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    positive_sum_rnn_query.addEquation(output_equation)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    min_y_0 = relu(relu(x_addition[0] + x_time_multiply[0] * 1) * 1)
    max_y_0 = relu(relu(x_addition[1] + x_time_multiply[1] * 1) * 1)

    initial_values = [max_y_0]

    return positive_sum_rnn_query, [rnn_start_idx], None, \
        [property_eq], initial_values
Example #21
    def define_B_network(xlim, n_iterations, hidden_weight):
        '''
        Define an adversarial robustness example
        1 <= x_0 <= 2
        s_i = 10 * x_0 - hidden_weight * s_(i-1)
        A = 5 (a fixed value)
        B = s_i
        prove that after n_iterations A >= B
        :param xlim: array of tuples, each array cell is an input (x_0, x_1, etc.); the tuple is (min_value, max_value)
        :param n_iterations: number of iterations
        :return: network, [s_cell_iterator], None, (min_a, max_b), None
        '''
        network = MarabouCore.InputQuery()
        network.setNumberOfVariables(len(xlim))  # x1, x2

        # x1
        network.setLowerBound(0, xlim[0][0])
        network.setUpperBound(0, xlim[0][1])

        s_hidden_w = hidden_weight
        x0_s_w = 10

        s_cell_iterator = network.getNumberOfVariables()  # i
        s_i_f_idx = add_rnn_cell(network, [(0, x0_s_w)], s_hidden_w, n_iterations, print_debug=True)

        a_idx = s_i_f_idx + 1
        b_idx = a_idx + 1

        a_value = 5
        a_w = 1
        b_w = 1

        network.setNumberOfVariables(b_idx + 1)

        # A
        network.setLowerBound(a_idx, -large)  # A
        network.setUpperBound(a_idx, large)

        # B
        network.setLowerBound(b_idx, -large)  # B
        network.setUpperBound(b_idx, large)

        a_fix_val = MarabouCore.Equation()
        a_fix_val.addAddend(1, a_idx)
        a_fix_val.setScalar(a_value)
        a_fix_val.dump()
        network.addEquation(a_fix_val)

        # B = s_k f <--> B - s_k f = 0
        b_output_eq = MarabouCore.Equation()
        b_output_eq.addAddend(1, b_idx)
        b_output_eq.addAddend(-b_w, s_i_f_idx)
        b_output_eq.setScalar(0)
        b_output_eq.dump()
        network.addEquation(b_output_eq)

        min_b = relu(relu(xlim[0][0] * x0_s_w) * b_w)
        max_b = relu(relu(xlim[0][1] * x0_s_w) * b_w)

        min_a = a_value
        max_a = a_value

        return network, [s_cell_iterator], None, (min_a, max_b), None
Example #22
def define_concatenate_rnn(xlim, ylim, n_iterations):
    '''
        xlim[0] <= x_0 <= xlim[1]
        s_i = 1 * x_0 + 1 * s_i-1
        z_i = 1 * s_i + 1 * z_i-1
        y = z_i
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # x

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    rnn_1_start_idx = 1  # i
    rnn_1_idx = add_rnn_cell(query, [(0, 1)], 1, n_iterations,
                             print_debug=1)  # rnn_idx == s_i f
    rnn_2_start_idx = rnn_1_idx + 1  # i
    rnn_2_idx = add_rnn_cell(query, [(rnn_1_idx, 1)],
                             1,
                             n_iterations,
                             print_debug=1)  # rnn_idx == s_i f
    y_idx = rnn_2_idx + 1

    def relu(x):
        return max(0, x)

    min_s1 = relu(xlim[0] * 1)
    max_s1 = relu(xlim[1] * 1)
    min_z1 = relu(min_s1 * 1)
    max_z1 = relu(max_s1 * 1)

    query.setNumberOfVariables(y_idx + 1)

    # y
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_2_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    query.addEquation(output_equation)

    # s_i f <= i + 1 <--> i - s_i f >= -1
    invariant_1_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_1_equation.addAddend(1, rnn_1_start_idx)  # i
    invariant_1_equation.addAddend(-1, rnn_1_idx)  # s_i f
    invariant_1_equation.setScalar(-1)

    # z_i f <= (n_iterations + 1) * i + 1 <--> (n_iterations + 1) * i - z_i f >= -1
    invariant_2_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_2_equation.addAddend(n_iterations + 1, rnn_2_start_idx)  # i
    invariant_2_equation.addAddend(-1, rnn_2_idx)  # z_i f
    invariant_2_equation.setScalar(-1)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return query, [rnn_1_start_idx, rnn_2_start_idx], \
        [invariant_1_equation, invariant_2_equation], [property_eq], [max_s1, max_z1]
Example #23
def define_concatenate_rnn_invariant_not_holding(xlim, ylim, n_iterations):
    '''
    Defines a network with two RNNs, one after the other, so the input to the second RNN is the output of the first.
    Here the invariant of the second RNN does not hold.
    :param xlim:
    :param ylim:
    :param n_iterations:
    :return:
    '''
    positive_sum_rnn_query = MarabouCore.InputQuery()
    positive_sum_rnn_query.setNumberOfVariables(1)  # x

    # x
    positive_sum_rnn_query.setLowerBound(0, xlim[0])
    positive_sum_rnn_query.setUpperBound(0, xlim[1])

    rnn_1_start_idx = 1  # i
    rnn_1_idx = add_rnn_cell(positive_sum_rnn_query, [(0, 1)],
                             1,
                             n_iterations,
                             print_debug=1)  # rnn_idx == s_i f
    rnn_2_start_idx = rnn_1_idx + 1  # i
    rnn_2_idx = add_rnn_cell(positive_sum_rnn_query, [(rnn_1_idx, 1)],
                             1,
                             n_iterations,
                             print_debug=1)  # rnn_idx == s_i f
    y_idx = rnn_2_idx + 1

    positive_sum_rnn_query.setNumberOfVariables(y_idx + 1)

    # y
    positive_sum_rnn_query.setLowerBound(y_idx, -large)
    positive_sum_rnn_query.setUpperBound(y_idx, large)

    # y - s_k f = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, y_idx)
    output_equation.addAddend(-1, rnn_2_idx)
    output_equation.setScalar(0)
    # output_equation.dump()
    positive_sum_rnn_query.addEquation(output_equation)

    # s_i f <= i + 1 <--> i - s_i f >= -1
    invariant_1_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_1_equation.addAddend(1, rnn_1_start_idx)  # i
    invariant_1_equation.addAddend(-1, rnn_1_idx)  # s_i f
    invariant_1_equation.setScalar(-1)

    # z_i f <= (n_iterations + 1) * i + 1 <--> (n_iterations + 1) * i - z_i f >= -1
    invariant_2_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_2_equation.addAddend(n_iterations + 1, rnn_2_start_idx)  # i
    invariant_2_equation.addAddend(-1, rnn_2_idx)  # z_i f
    invariant_2_equation.setScalar(-1)

    # y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return positive_sum_rnn_query, [rnn_1_start_idx, rnn_2_start_idx], [
        invariant_1_equation, invariant_2_equation
    ], [property_eq]
Example #24
    w_in_0 = [1]  # assumed: the snippet starts mid-function and omits this definition
    w_in_1 = [1]  # w_in_1 = [1, 1]

    w_h_0 = [1, 1]
    w_h_1 = [0, 0]
    b_h = [0, 0]
    w_out_0 = [1]
    w_out_1 = [1]

    # w_in_0 = [0.1, 0.2]
    # w_in_1 = [0.3, 0.4]
    # w_h_0 = [0.5, 0.7]
    # w_h_1 = [0.2, 0.3]
    # w_out_0 = [0.2]
    # w_out_1 = [1.2]

    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(0)

    perturbation_limit = 0.1
    set_img_bounds(img, network, perturbation_limit)  # img and n come from earlier in the original file (not shown)

    x_min = img * (1 - perturbation_limit)
    x_max = img * (1 + perturbation_limit)

    w_in = np.array([w_in_0, w_in_1])
    w_h = np.array([w_h_0, w_h_1])

    rnn_output_idxs = add_rnn_cells(network, w_in, w_h, b_h, n)
    rnn_start_idxs = [i - 3 for i in rnn_output_idxs]

    w_out = np.array([1, 0])
Example #25
def define_adversarial_robustness_two_input_nodes_step_fail(xlim, n_iterations):
    '''
    Define an adversarial robustness example where it will not be possible to find an invariant that works
    0 <= x_0 <= 1
    1 <= x_1 <= 2
    s_i = 1 * x_0 + 5 * x_1 + 1 * s_(i-1)
    z_i = 2 * x_0 + 1 * x_1 + 100 * z_(i-1)
    A = s_i
    B = z_i
    therefore:
        5 <= A <= 6
        1 <= B <= 4
    prove that after n_iterations A >= B
    :param xlim: array of tuples, each array cell is an input (x_0, x_1, etc.); the tuple is (min_value, max_value)
    :param n_iterations: number of iterations
    :return: network, [s_cell_iterator, z_cell_iterator], [a_invariant_equation, b_invariant_equation],\
           (min_a, max_b), (-alpha, alpha)
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(len(xlim))  # x1, x2

    # x1
    network.setLowerBound(0, xlim[0][0])
    network.setUpperBound(0, xlim[0][1])

    # x2
    network.setLowerBound(1, xlim[1][0])
    network.setUpperBound(1, xlim[1][1])

    s_hidden_w = 1
    z_hidden_w = 100
    x0_s_w = 1
    x1_s_w = 5
    x0_z_w = 2
    x1_z_w = 1

    # s_i f = relu(1 * x_0 + 5 * x_1 + 1 * s_i-1 f)
    s_cell_iterator = network.getNumberOfVariables()  # i
    s_i_f_idx = add_rnn_cell(network, [(0, x0_s_w), (1, x1_s_w)], s_hidden_w, n_iterations, print_debug=True)
    s_i_1_f_idx = s_i_f_idx - 2
    # z_i f = relu(2 * x_0 + 1 * x_1 + 100 * z_i-1 f)
    z_cell_iterator = network.getNumberOfVariables()
    z_i_f_idx = add_rnn_cell(network, [(0, x0_z_w), (1, x1_z_w)], z_hidden_w, n_iterations, print_debug=True)
    z_i_1_f_idx = z_i_f_idx - 2

    a_idx = z_i_f_idx + 1
    b_idx = a_idx + 1

    a_w = 1
    b_w = 1

    network.setNumberOfVariables(b_idx + 1)

    # A
    network.setLowerBound(a_idx, -large)  # A
    network.setUpperBound(a_idx, large)

    # B
    network.setLowerBound(b_idx, -large)  # B
    network.setUpperBound(b_idx, large)

    # A = s_k f <--> A - s_k f = 0
    a_output_eq = MarabouCore.Equation()
    a_output_eq.addAddend(1, a_idx)
    a_output_eq.addAddend(-a_w, s_i_f_idx)
    a_output_eq.setScalar(0)
    a_output_eq.dump()
    network.addEquation(a_output_eq)

    # B = z_k f <--> B - z_k f = 0
    b_output_eq = MarabouCore.Equation()
    b_output_eq.addAddend(1, b_idx)
    b_output_eq.addAddend(-b_w, z_i_f_idx)
    b_output_eq.setScalar(0)
    b_output_eq.dump()
    network.addEquation(b_output_eq)

    min_b = relu(relu(xlim[0][0] * x0_z_w) + relu(xlim[1][0] * x1_z_w) * b_w)
    max_b = relu(relu(xlim[0][1] * x0_z_w) + relu(xlim[1][1] * x1_z_w) * b_w)

    min_a = relu(relu(xlim[0][0] * x0_s_w) + relu(xlim[1][0] * x1_s_w) * a_w)
    max_a = relu(relu(xlim[0][1] * x0_s_w) + relu(xlim[1][1] * x1_s_w) * a_w)

    initial_diff = min_a - max_b
    # assert initial_diff >= 0
    alpha = initial_diff / (2 * n_iterations) + max_b
    print('min_a', min_a)
    print('max_b', max_b)
    print('initial_diff', initial_diff)
    print('alpha', alpha)

    a_invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    a_invariant_equation.addAddend(1, s_i_f_idx)  # a_i
    a_invariant_equation.addAddend(alpha, s_cell_iterator)  # i
    a_invariant_equation.setScalar(min_a)

    b_invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    b_invariant_equation.addAddend(1, z_i_f_idx)  # b_i
    b_invariant_equation.addAddend(-alpha, z_cell_iterator)  # i
    b_invariant_equation.setScalar(max_b)

    return network, [s_cell_iterator, z_cell_iterator], [a_invariant_equation, b_invariant_equation], \
           (min_a, max_b), (-alpha, alpha)
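With the bounds from the docstring (0 <= x_0 <= 1, 1 <= x_1 <= 2) the initial-value expressions above evaluate to min_a = 5 and max_b = 4, so the alpha computation works out as follows (a worked check, using n_iterations = 10 as an example):

min_a, max_b = 5, 4      # values of the relu expressions above for these bounds
n_iterations = 10
initial_diff = min_a - max_b                        # 1
alpha = initial_diff / (2 * n_iterations) + max_b   # 4.05
print(alpha)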
Example #26
#  x0 + x2b = 0
#  x1f + x2f - x3 = 0
#
#  x1f = Relu(x1b)
#  x2f = Relu(x2b)
#
#   x0: x0
#   x1: x1b
#   x2: x1f
#   x3: x2b
#   x4: x2f
#   x5: x3

large = 10.0

inputQuery = MarabouCore.InputQuery()

inputQuery.setNumberOfVariables(6)

inputQuery.setLowerBound(0, 0)
inputQuery.setUpperBound(0, 1)

inputQuery.setLowerBound(1, -large)
inputQuery.setUpperBound(1, large)

inputQuery.setLowerBound(2, 0)
inputQuery.setUpperBound(2, large)

inputQuery.setLowerBound(3, -large)
inputQuery.setUpperBound(3, large)
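The snippet ends mid-setup: the bounds for variables 4 and 5 and the equations described in the comment block are missing. A completion consistent with that comment block (an assumption, since the original lines are not shown):

inputQuery.setLowerBound(4, 0)
inputQuery.setUpperBound(4, large)

inputQuery.setLowerBound(5, -large)
inputQuery.setUpperBound(5, large)

# x0 + x2b = 0  (x2b is variable 3)
eq1 = MarabouCore.Equation()
eq1.addAddend(1, 0)
eq1.addAddend(1, 3)
eq1.setScalar(0)
inputQuery.addEquation(eq1)

# x1f + x2f - x3 = 0  (variables 2, 4, 5)
eq2 = MarabouCore.Equation()
eq2.addAddend(1, 2)
eq2.addAddend(1, 4)
eq2.addAddend(-1, 5)
eq2.setScalar(0)
inputQuery.addEquation(eq2)

# x1f = Relu(x1b), x2f = Relu(x2b)
MarabouCore.addReluConstraint(inputQuery, 1, 2)
MarabouCore.addReluConstraint(inputQuery, 3, 4)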
Example #27
def define_sum_network(xlim=(-1, 1)):
    '''
    Defines the sum network in a marabou way, without the recurrent part
    i.e. we define:
        s_i b = s_i-1 f + x_i
        y = s_i f
    :param xlim: how to limit the input to the network
    :return: query to marabou that defines the sum rnn network (without the recurrent part)
    '''
    num_params_for_cell = 8

    sum_rnn_query = MarabouCore.InputQuery()
    sum_rnn_query.setNumberOfVariables(num_params_for_cell)

    # x
    sum_rnn_query.setLowerBound(0, xlim[0])
    sum_rnn_query.setUpperBound(0, xlim[1])

    # s_i-1 f (or temp in some of my notes)
    sum_rnn_query.setLowerBound(1, 0)
    sum_rnn_query.setUpperBound(1, large)

    # s_i b
    sum_rnn_query.setLowerBound(2, -large)
    sum_rnn_query.setUpperBound(2, large)

    # s_i f
    sum_rnn_query.setLowerBound(3, 0)
    sum_rnn_query.setUpperBound(3, large)

    # z_i-1 f
    sum_rnn_query.setLowerBound(4, 0)
    sum_rnn_query.setUpperBound(4, large)

    # z_i b
    sum_rnn_query.setLowerBound(5, -large)
    sum_rnn_query.setUpperBound(5, large)

    # z_i f
    sum_rnn_query.setLowerBound(6, 0)
    sum_rnn_query.setUpperBound(6, large)

    # y
    sum_rnn_query.setLowerBound(7, -large)
    sum_rnn_query.setUpperBound(7, large)

    # s_i b = x_i * 1 + s_i-1 f * 1
    update_eq = MarabouCore.Equation()
    update_eq.addAddend(1, 0)
    update_eq.addAddend(1, 1)
    update_eq.addAddend(-1, 2)
    update_eq.setScalar(0)
    sum_rnn_query.addEquation(update_eq)

    # s_i f = ReLu(s_i b)
    MarabouCore.addReluConstraint(sum_rnn_query, 2, 3)

    # z_i b = -x_i + z_i-1 f
    update_eq = MarabouCore.Equation()
    update_eq.addAddend(-1, 0)
    update_eq.addAddend(1, 4)
    update_eq.addAddend(-1, 5)
    update_eq.setScalar(0)
    sum_rnn_query.addEquation(update_eq)

    # z_i f = ReLu(z_i b)
    MarabouCore.addReluConstraint(sum_rnn_query, 5, 6)

    # s_k f + z_k f - y = 0
    output_equation = MarabouCore.Equation()
    output_equation.addAddend(1, 3)
    output_equation.addAddend(1, 6)
    output_equation.addAddend(-1, 7)
    output_equation.setScalar(0)
    sum_rnn_query.addEquation(output_equation)

    return sum_rnn_query
Example #28
 def getMarabouQuery(self, property_path=""):
     self.ipq = MarabouCore.InputQuery()
     MarabouCore.createInputQuery(self.ipq, self.network_path,
                                  property_path)
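`MarabouCore.createInputQuery` populates a query directly from a network file and a property file. A standalone sketch with placeholder paths:

ipq = MarabouCore.InputQuery()
MarabouCore.createInputQuery(ipq, "path/to/network.nnet", "path/to/property.txt")  # placeholder paths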
Example #29
def define_adversarial_robustness_concatenate_rnn(xlim, n_iterations):
    '''
    xlim[0] <= x_0 <= xlim[1]
    s1_i = 10 * x_0 + 0.5 * s1_i-1
    s2_i = 1 * s1_i + 1 * s2_i-1
    z1_i = 1 * x_0 + 1 * z1_i-1
    z2_i = 1 * z1_i + 1 * z2_i-1
    A = s2
    B = z2
    indices:
        0: x_0
        1: i
        2: s1_i-1
        3: s1_i_b
        4: s1_i_f
        5: i
        6: z1_i-1
        7: z1_i_b
        8: z1_i_f
        9: i
        10: s2_i-1
        11: s2_i_b
        12: s2_i_f
        13: i
        14: z2_i-1
        15: z2_i_b
        16: z2_i_f
        17: A
        18: B
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(len(xlim))  # x

    # x
    query.setLowerBound(0, xlim[0][0])
    query.setUpperBound(0, xlim[0][1])

    w_x_s1 = 10
    w_s1_s2 = 1
    w_s1_s1 = 0.5
    w_s2_s2 = 1
    w_s2_a = 1

    w_x_z1 = 1
    w_z1_z2 = 1
    w_z1_z1 = 1
    w_z2_z2 = 1
    w_z2_b = 1

    s1_out_idx = add_rnn_cell(query, [(0, w_x_s1)], w_s1_s1, n_iterations)
    z1_out_idx = add_rnn_cell(query, [(0, w_x_z1)], w_z1_z1, n_iterations)

    s2_out_idx = add_rnn_cell(query, [(s1_out_idx, w_s1_s2)], w_s2_s2, n_iterations)
    z2_out_idx = add_rnn_cell(query, [(z1_out_idx, w_z1_z2)], w_z2_z2, n_iterations)

    a_idx = z2_out_idx + 1
    b_idx = a_idx + 1
    print("s1_out_idx:", s1_out_idx)
    print("s2_out_idx:", s2_out_idx)
    print("z1_out_idx:", z1_out_idx)
    print("z2_out_idx:", z2_out_idx)
    print("a_idx:", a_idx)
    print("b_idx:", b_idx)

    query.setNumberOfVariables(b_idx + 1)

    query.setLowerBound(a_idx, -large)
    query.setUpperBound(a_idx, large)
    query.setLowerBound(b_idx, -large)
    query.setUpperBound(b_idx, large)

    a_output_equation = MarabouCore.Equation()
    a_output_equation.addAddend(1, a_idx)
    a_output_equation.addAddend(-w_s2_a, s2_out_idx)
    a_output_equation.setScalar(0)
    query.addEquation(a_output_equation)

    b_output_equation = MarabouCore.Equation()
    b_output_equation.addAddend(1, b_idx)
    b_output_equation.addAddend(-w_z2_b, z2_out_idx)
    b_output_equation.setScalar(0)
    query.addEquation(b_output_equation)

    min_s1 = relu(xlim[0][0] * w_x_s1)
    min_s2 = relu(relu(xlim[0][0] * w_x_s1) * w_s1_s2)
    max_z1 = relu(xlim[0][1] * w_x_z1)
    max_z2 = relu(relu(xlim[0][1] * w_x_z1) * w_z1_z2)

    # min_a = relu(relu(relu(xlim[0][0] * w_x_s1) * w_s1_s2) * w_s2_a)
    # max_a = relu(relu(relu(xlim[0][1] * w_x_s1) * w_s1_s2) * w_s2_a)
    # min_b = relu(relu(relu(xlim[0][0] * w_x_z1) * w_z1_z2) * w_z2_b)
    # max_b = relu(relu(relu(xlim[0][1] * w_x_z1) * w_z1_z2) * w_z2_b)
    # print('min_a', min_a)
    # print('max_a', max_a)
    # print('min_b', min_b)
    # print('max_b', max_b)

    # This means that the only cell that is dependent on s1 is s2, and same for z1 and z2
    rnn_dependent = [[2], [3], None, None]
    return query, [i - 3 for i in [s1_out_idx, z1_out_idx, s2_out_idx, z2_out_idx]], None, (
        min_s1, max_z1, min_s2, max_z2), rnn_dependent
Example #30
def define_adversarial_robustness_two_input_nodes_two_hidden(
        xlim, n_iterations):
    '''
    Define an adversarial robustness example
    0 <= x_0 <= 1
    1 <= x_1 <= 2
    s_i = 1 * x_0 + 5 * x_1 + 1 * s_(i-1)
    z_i = 2 * x_0 + 1 * x_1 + 1 * z_(i-1)
    A = s_i
    B = z_i
    therefore:
        5 <= A <= 6
        1 <= B <= 4
    prove that after n_iterations A >= B
    :param xlim: array of tuples, each array cell is an input (x_0, x_1, etc.); the tuple is (min_value, max_value)
    :param n_iterations: number of iterations
    :return: network, rnn_output_idx, initial_values, adv_eq
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(len(xlim))  # x1, x2

    # x1
    network.setLowerBound(0, xlim[0][0])
    network.setUpperBound(0, xlim[0][1])

    # x2
    network.setLowerBound(1, xlim[1][0])
    network.setUpperBound(1, xlim[1][1])

    s_s_hidden_w = 1
    s_z_hidden_w = 1
    z_s_hidden_w = 0.9
    z_z_hidden_w = 1
    x0_s_w = 1
    x1_s_w = 4
    x0_z_w = 2
    x1_z_w = 1

    rnn_output_idx = add_rnn_multidim_cells(
        network, [0, 1], np.array([[x0_s_w, x1_s_w], [x0_z_w, x1_z_w]]),
        np.array([[s_s_hidden_w, s_z_hidden_w], [z_s_hidden_w, z_z_hidden_w]]),
        [0, 0], n_iterations)
    a_idx = rnn_output_idx[-1] + 1
    b_idx = a_idx + 1

    a_w = 1
    b_w = 1

    network.setNumberOfVariables(b_idx + 1)

    # A
    network.setLowerBound(a_idx, -LARGE)  # A
    network.setUpperBound(a_idx, LARGE)

    # B
    network.setLowerBound(b_idx, -LARGE)  # B
    network.setUpperBound(b_idx, LARGE)

    # A = s_k f <--> A - s_k f = 0
    a_output_eq = MarabouCore.Equation()
    a_output_eq.addAddend(1, a_idx)
    a_output_eq.addAddend(-a_w, rnn_output_idx[0])
    a_output_eq.setScalar(0)
    a_output_eq.dump()
    network.addEquation(a_output_eq)

    # B = z_k f <--> B - z_k f = 0
    b_output_eq = MarabouCore.Equation()
    b_output_eq.addAddend(1, b_idx)
    b_output_eq.addAddend(-b_w, rnn_output_idx[1])
    b_output_eq.setScalar(0)
    b_output_eq.dump()
    network.addEquation(b_output_eq)

    min_b = relu(relu(xlim[0][0] * x0_z_w) + relu(xlim[1][0] * x1_z_w) * b_w)
    max_b = relu(relu(xlim[0][1] * x0_z_w) + relu(xlim[1][1] * x1_z_w) * b_w)

    min_a = relu(relu(xlim[0][0] * x0_s_w) + relu(xlim[1][0] * x1_s_w) * a_w)
    max_a = relu(relu(xlim[0][1] * x0_s_w) + relu(xlim[1][1] * x1_s_w) * a_w)
    initial_values = ([min_a, min_b], [max_a, max_b])
    print('a: {} {}'.format(min_a, max_a))
    print('b: {} {}'.format(min_b, max_b))

    # A >= B <--> A - B >= 0
    adv_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    adv_eq.addAddend(1, a_idx)
    adv_eq.addAddend(-1, b_idx)
    adv_eq.setScalar(0)

    return network, rnn_output_idx, initial_values, [negate_equation(adv_eq)]
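A sketch, not from the original file, of how such a return value might be consumed: add the (already negated) property equations and solve, using the older `solve` signature from Example #2:

network, rnn_output_idx, initial_values, property_eqs = \
    define_adversarial_robustness_two_input_nodes_two_hidden([(0, 1), (1, 2)], 10)
for eq in property_eqs:
    network.addEquation(eq)
vars1, stats1 = MarabouCore.solve(network, "", 0, 0)
print("SAT" if len(vars1) > 0 else "UNSAT")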