Ejemplo n.º 1
0
 def assert_relu_constraint(self, relu):
     """Register a ReLU constraint; relu.varin/varout must be scalar."""
     varin_size = len(np.array(relu.varin).flatten())
     if varin_size > 1:
         # Only scalar-to-scalar ReLUs are supported here.
         print("ERROR: relu.varin is not scalar! It has length",
               len(relu.varin))
         raise NotImplementedError
     # Scalar case: count it and hand the variable pair to Marabou.
     self.num_relu += 1
     MarabouCore.addReluConstraint(self.ipq,
                                   self.get_new_var(relu.varin),
                                   self.get_new_var(relu.varout))
Ejemplo n.º 2
0
 def saveQuery(self, filename=""):
     """
     Serialize this network's input query to disk.

     Arguments:
         filename: (string) path to redirect output to
     Returns:
         None
     """
     MarabouCore.saveQuery(self.getMarabouQuery(), filename)
Ejemplo n.º 3
0
 def saveQuery(self, filename=""):
     """
     Write the serialized inputQuery for this network to a file.

     Arguments:
         filename: (string) file to write serialized inputQuery
     Returns:
         None
     """
     query = self.getMarabouQuery()
     MarabouCore.saveQuery(query, filename)
Ejemplo n.º 4
0
def define_last_network(xlim=(-1, 1)):
    '''
    Defines the positive_sum network in a marabou way, without the recurrent part
    i.e. we define:
        s_i b = s_i-1 f + x_i
        y = s_i f
    :param xlim: how to limit the input to the network
    :return: query to marabou that defines the positive_sum rnn network (without recurent)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(5)

    # Variable layout: 0 = x, 1 = s_i-1 f, 2 = s_i b, 3 = s_i f, 4 = y
    variable_bounds = [
        (xlim[0], xlim[1]),  # x
        (0, large),          # s_i-1 f (or temp in some of my notes)
        (-large, large),     # s_i b
        (0, large),          # s_i f
        (-large, large),     # y
    ]
    for var, (lower, upper) in enumerate(variable_bounds):
        query.setLowerBound(var, lower)
        query.setUpperBound(var, upper)

    # s_i b = x_i * 1  (the recurrent addend is intentionally left out)
    update_eq = MarabouCore.Equation()
    update_eq.addAddend(1, 0)
    update_eq.addAddend(-1, 2)
    update_eq.setScalar(0)
    query.addEquation(update_eq)

    # s_i f = ReLu(s_i b)
    MarabouCore.addReluConstraint(query, 2, 3)

    # y = s_i f  <-->  y - s_i f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, 4)
    out_eq.addAddend(-1, 3)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    return query
Ejemplo n.º 5
0
def define_positive_sum_network_no_invariant(xlim, ylim, n_iterations):
    '''
    Defines the positive_sum network in a marabou way
        s_i = ReLu(1 * x_i + 1 * s_i-1)
        y = s_k (where k == n_iterations)
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: (query, [rnn iterator idx], None, [negated property], initial values)
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # x only, the rnn cell adds its own vars

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # i
    # rnn_idx points at s_i f
    rnn_idx = add_rnn_cell(query, [(0, 1)], 1, n_iterations, print_debug=1)
    s_i_1_f_idx = rnn_idx - 2
    y_idx = rnn_idx + 1

    query.setNumberOfVariables(y_idx + 1)

    # y is unbounded a-priori
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y = s_k f  <-->  y - s_k f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # property to negate: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    def relu(x):
        return max(x, 0)

    # bounds on y obtained by pushing xlim through one unrolled step
    min_y = relu(relu(xlim[0] * 1) * 1)
    max_y = relu(relu(xlim[1] * 1) * 1)
    initial_values = [[min_y], [max_y]]

    return query, [rnn_start_idx], None, [negate_equation(property_eq)], initial_values
Ejemplo n.º 6
0
    def getMarabouQuery(self):
        """Function to convert network into Marabou InputQuery

        Returns:
            :class:`~maraboupy.MarabouCore.InputQuery`
        """
        ipq = MarabouCore.InputQuery()
        ipq.setNumberOfVariables(self.numVars)

        # Mark inputs in flattened order across all input arrays.
        input_pos = 0
        for input_array in self.inputVars:
            for var in input_array.flatten():
                ipq.markInputVariable(var, input_pos)
                input_pos += 1

        for output_pos, var in enumerate(self.outputVars.flatten()):
            ipq.markOutputVariable(var, output_pos)

        # Linear equations.
        for e in self.equList:
            eq = MarabouCore.Equation(e.EquationType)
            for coeff, var in e.addendList:
                assert var < self.numVars
                eq.addAddend(coeff, var)
            eq.setScalar(e.scalar)
            ipq.addEquation(eq)

        # Piecewise-linear constraints.
        for b, f in self.reluList:
            assert f < self.numVars and b < self.numVars
            MarabouCore.addReluConstraint(ipq, b, f)

        for elements, target in self.maxList:
            assert target < self.numVars
            for e in elements:
                assert e < self.numVars
            MarabouCore.addMaxConstraint(ipq, elements, target)

        for b, f in self.absList:
            MarabouCore.addAbsConstraint(ipq, b, f)

        for b, f in self.signList:
            MarabouCore.addSignConstraint(ipq, b, f)

        # Variable bounds.
        for var, bound in self.lowerBounds.items():
            assert var < self.numVars
            ipq.setLowerBound(var, bound)

        for var, bound in self.upperBounds.items():
            assert var < self.numVars
            ipq.setUpperBound(var, bound)

        return ipq
    def evaluateWithMarabou(self,
                            inputValues,
                            filename="evaluateWithMarabou.log",
                            options=None):
        """Function to evaluate network at a given point using Marabou as solver

        Args:
            inputValues (list of np arrays): Inputs to evaluate
            filename (str): Path to redirect output if using Marabou solver, defaults to "evaluateWithMarabou.log"
            options (:class:`~maraboupy.MarabouCore.Options`): Object for specifying Marabou options, defaults to None

        Returns:
            (list of np arrays): Values representing the outputs of the network or None if system is UNSAT
        """
        # Make sure inputValues is a list of np arrays and not list of lists
        inputValues = [np.array(inVal) for inVal in inputValues]

        # Pair up flattened variable indices with their concrete values.
        inputVarList = np.concatenate(
            [inVar.flatten() for inVar in self.inputVars], axis=-1).flatten()
        inputValList = np.concatenate(
            [inVal.flatten() for inVal in inputValues]).flatten()
        inputDict = dict(zip(inputVarList, inputValList))

        # Pin every input variable to its value via equal bounds.
        ipq = self.getMarabouQuery()
        for var, val in inputDict.items():
            ipq.setLowerBound(var, val)
            ipq.setUpperBound(var, val)

        if options is None:  # fixed: identity comparison with None, not ==
            options = MarabouCore.Options()
        exitCode, outputDict, _ = MarabouCore.solve(ipq, options,
                                                    str(filename))

        # When the query is UNSAT an empty dictionary is returned
        if outputDict == {}:
            return None

        # Look up each output variable's assignment using its integer index
        # (previously the indices were cast to float64 and used as dict keys,
        # which relied on float/int hash equality).
        outputValues = []
        for outVars in self.outputVars:
            flat = np.array([outputDict[var] for var in outVars.reshape(-1)],
                            dtype=np.float64)
            outputValues.append(flat.reshape(outVars.shape))
        return outputValues
Ejemplo n.º 8
0
def adversarial_query(x: list, radius: float, y_idx_max: int, other_idx: int, h5_file_path: str, algorithm_ptr,
                      n_iterations=10, steps_num=5000):
    '''
    Query marabou with adversarial query
    :param x: base_vector (input vector that we want to find a ball around it)
    :param radius: determines the limit of the inputs around the base_vector
    :param y_idx_max: max index in the output layer (None -> inferred from the model)
    :param other_idx: which index to compare max idx (None -> inferred from the model)
    :param h5_file_path: path to keras model which we will check on
    :param algorithm_ptr: factory (rnn_model, xlim) -> proof algorithm
    :param n_iterations: number of iterations to run
    :param steps_num: bound on the number of proof steps
    :return: True / False, queries_stats, and the algorithm's alpha history
    '''
    if y_idx_max is None or other_idx is None:
        y_idx_max, other_idx = get_out_idx(x, n_iterations, h5_file_path)
        if y_idx_max == other_idx or y_idx_max is None or other_idx is None:
            # All the entries in the output vector are equal, nothing to prove
            return False, None, None

    xlim = calc_min_max_by_radius(x, radius)
    rnn_model = RnnMarabouModel(h5_file_path, n_iterations)
    rnn_model.set_input_bounds(xlim)

    # output[y_idx_max] >= output[0] <-> output[y_idx_max] - output[0] >= 0, before feeding marabou we negate this
    adv_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    adv_eq.addAddend(-1, rnn_model.output_idx[other_idx])
    adv_eq.addAddend(1, rnn_model.output_idx[y_idx_max])
    adv_eq.setScalar(0)

    # pin the iterator of the first rnn layer to n_iterations
    time_eq = MarabouCore.Equation()
    time_eq.addAddend(1, rnn_model.get_start_end_idxs(0)[0][0])
    time_eq.setScalar(n_iterations)

    start_initial_alg = timer()
    algorithm = algorithm_ptr(rnn_model, xlim)
    end_initial_alg = timer()

    res, queries_stats = prove_multidim_property(rnn_model, [negate_equation(adv_eq), time_eq], algorithm, debug=1,
                                                 return_queries_stats=True, number_of_steps=steps_num)
    if queries_stats:
        step_times = queries_stats['step_times']['raw']
        # account for the time spent constructing the algorithm itself
        step_times.insert(0, end_initial_alg - start_initial_alg)
        queries_stats['step_times'] = {'avg': np.mean(step_times), 'median': np.median(step_times), 'raw': step_times}
        queries_stats['step_queries'] = len(step_times)

        # Bug fix: this sanity check used to run even when queries_stats was
        # None, which raised a TypeError on the `in` test.
        if 'invariant_queries' in queries_stats and 'property_queries' in queries_stats and \
                queries_stats['property_queries'] != queries_stats['invariant_queries']:
            print("What happened?\n", x)
    return res, queries_stats, algorithm.alpha_history
def test_statistics():
    """
    Test that solving a trivial one-variable query produces the expected
    solver statistics: no case splits, exactly two main-loop iterations,
    and zero maximal degradation.

    (Fixed: the previous docstring was copied from a save/load test and
    did not describe this test.)
    """
    ipq = MarabouCore.InputQuery()
    ipq.setNumberOfVariables(1)
    ipq.setLowerBound(0, -1)
    ipq.setUpperBound(0, 1)

    opt = createOptions(verbosity = 0) # Turn off printing
    exitCode, vals, stats = MarabouCore.solve(ipq, opt, "")
    assert(stats.getUnsignedAttribute(MarabouCore.StatisticsUnsignedAttribute.NUM_SPLITS) == 0)
    assert(stats.getLongAttribute(MarabouCore.StatisticsLongAttribute.NUM_MAIN_LOOP_ITERATIONS) == 2)
    assert(stats.getDoubleAttribute(MarabouCore.StatisticsDoubleAttribute.MAX_DEGRADATION) == 0)
Ejemplo n.º 10
0
def define_zero_network(xlim, ylim, n_iterations):
    '''
    Defines the zero network in a marabou way
    The zero network is a network with two rnn cells, that always outputs zero
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network, will effect how we create the invariant
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: (network, [cell iterators], invariant equation, [property equations])
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(1)  # x

    # x
    network.setLowerBound(0, xlim[0])
    network.setUpperBound(0, xlim[1])

    # two identical rnn cells over the same input; their outputs should match
    s_cell_iterator = 1  # i
    s_i_f_idx = add_rnn_cell(network, [(0, 1)], 1, n_iterations)
    z_cell_iterator = network.getNumberOfVariables()
    z_i_f_idx = add_rnn_cell(network, [(0, 1)], 1, n_iterations)
    y_idx = z_i_f_idx + 1

    network.setNumberOfVariables(y_idx + 1)

    # y is unbounded a-priori
    network.setLowerBound(y_idx, -large)
    network.setUpperBound(y_idx, large)

    # y = skf - zkf <--> y - skf + zkf = 0
    out_eq = MarabouCore.Equation()
    for coeff, var in ((1, y_idx), (-1, s_i_f_idx), (1, z_i_f_idx)):
        out_eq.addAddend(coeff, var)
    out_eq.setScalar(0)
    network.addEquation(out_eq)

    # invariant: s_i f - z_i f <= SMALL
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(-1, z_i_f_idx)  # z_i f
    invariant_equation.addAddend(1, s_i_f_idx)  # s_i f
    invariant_equation.setScalar(SMALL)

    # property: y <= ylim
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim)

    return network, [s_cell_iterator,
                     z_cell_iterator], invariant_equation, [property_eq]
Ejemplo n.º 11
0
def define_negative_sum_network(xlim, ylim, n_iterations):
    '''
    Defines the negative network in a marabou way
        s_i = ReLu(-1 * x_i + s_i-1)
        y = s_k (where k == n_iterations)
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: (query, [rnn iterator idx], invariant equation, [property equations])
    '''
    query = MarabouCore.InputQuery()
    query.setNumberOfVariables(1)  # x

    # x
    query.setLowerBound(0, xlim[0])
    query.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # i
    # input weight -1, hidden weight 1; rnn_idx == s_i f
    rnn_idx = add_rnn_cell(query, [(0, -1)], 1, n_iterations)
    y_idx = rnn_idx + 1

    query.setNumberOfVariables(y_idx + 1)

    # y is unbounded a-priori
    query.setLowerBound(y_idx, -large)
    query.setUpperBound(y_idx, large)

    # y = s_k f  <-->  y - s_k f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    query.addEquation(out_eq)

    # invariant: s_i f <= i + 1 <--> i - s_i f >= -1
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_equation.addAddend(1, rnn_start_idx)  # i
    invariant_equation.addAddend(-1, rnn_idx)  # s_i f
    invariant_equation.setScalar(-1)

    # property: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return query, [rnn_start_idx], invariant_equation, [property_eq]
Ejemplo n.º 12
0
def define_two_sum_network(xlim, ylim, n_ierations):
    '''
    The network gets a series of numbers and outputs two neurons, one sums the positive numbers and the other
    the negative
    The property we will
    :param xlim: how to limit the input to the network
    :param ylim: how to limit the output of the network
    :param n_ierations: number of inputs / times the rnn cell will be executed
    :return: (network, [rnn iterator idx], invariant equation, [property equations])
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(1)  # x

    # x
    network.setLowerBound(0, xlim[0])
    network.setUpperBound(0, xlim[1])

    rnn_start_idx = 1  # i
    # rnn_idx == s_i f
    rnn_idx = add_rnn_cell(network, [(0, 1)], 1, n_ierations)
    y_idx = rnn_idx + 1

    network.setNumberOfVariables(y_idx + 1)

    # y is unbounded a-priori
    network.setLowerBound(y_idx, -large)
    network.setUpperBound(y_idx, large)

    # y = s_k f  <-->  y - s_k f = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, y_idx)
    out_eq.addAddend(-1, rnn_idx)
    out_eq.setScalar(0)
    network.addEquation(out_eq)

    # invariant: s_i f <= i <--> i - s_i f >= 0
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    invariant_equation.addAddend(1, rnn_start_idx)  # i
    invariant_equation.addAddend(-1, rnn_idx)  # s_i f
    invariant_equation.setScalar(0)

    # property: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, y_idx)
    property_eq.setScalar(ylim[1])

    return network, [rnn_start_idx], invariant_equation, [property_eq]
Ejemplo n.º 13
0
def test_negate_equation_LE():
    """negate_equation on a LE equation must yield its GE complement."""
    # x + y <= 1
    eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    for var in (0, 1):
        eq.addAddend(1, var)
    eq.setScalar(1)

    # expected negation: x + y >= 1 + epsilon
    expected = MarabouCore.Equation(MarabouCore.Equation.GE)
    for var in (0, 1):
        expected.addAddend(1, var)
    expected.setScalar(1 + SMALL)

    negated = negate_equation(eq)
    assert negated.equivalent(expected)
    assert not eq.equivalent(expected)
Ejemplo n.º 14
0
def test_negate_equation_GE():
    """negate_equation on a GE equation must yield its LE complement."""
    # x - y >= 0
    eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    eq.addAddend(1, 1)
    eq.addAddend(-1, 0)
    eq.setScalar(0)

    # expected negation: x - y <= -epsilon
    expected = MarabouCore.Equation(MarabouCore.Equation.LE)
    expected.addAddend(1, 1)
    expected.addAddend(-1, 0)
    expected.setScalar(-SMALL)

    negated = negate_equation(eq)
    assert negated.equivalent(expected)
    assert not eq.equivalent(expected)
Ejemplo n.º 15
0
    def __init__(self, h5_file_path, n_iterations=10):
        """Build a Marabou InputQuery from a saved keras model with rnn layers.

        :param h5_file_path: path to a tf.keras model (only SimpleRNN and
            Dense layers are supported)
        :param n_iterations: number of times the rnn cell will be unrolled
        """
        self.network = MarabouCore.InputQuery()
        self.model = tf.keras.models.load_model(h5_file_path)
        # TODO: If the input is 2d wouldn't work
        n_input_nodes = self.model.input_shape[-1]
        # Input variables occupy the first n_input_nodes slots of the query.
        prev_layer_idx = list(range(0, n_input_nodes))
        self.input_idx = prev_layer_idx
        self.n_iterations = n_iterations
        self.rnn_out_idx = []

        # Each cell in the list is a triple (in_w, hidden_w, bias), the cells are sorted by layer from input to output
        self.rnn_weights = []

        # save spot for the input nodes
        self.network.setNumberOfVariables(n_input_nodes)
        for layer in self.model.layers:
            if isinstance(layer, tf.keras.layers.SimpleRNN):
                prev_layer_idx = self.add_rnn_simple_layer(layer, prev_layer_idx)
                self.rnn_out_idx.append(prev_layer_idx)
            elif type(layer) == tf.keras.layers.Dense:
                prev_layer_idx = self.add_dense_layer(layer, prev_layer_idx)
            else:
                # only SimpleRNN and Dense layers can be translated to Marabou
                raise NotImplementedError("{} layer is not supported".format(type(layer)))

        # Save the last layer output indices.
        # NOTE(review): the range(*...) unpack assumes prev_layer_idx is a
        # (start, end) pair here, unlike the index list used for rnn layers
        # -- confirm against add_dense_layer's return value.
        self.output_idx = list(range(*prev_layer_idx))
        self._rnn_loop_idx = []
        self._rnn_prev_iteration_idx = []
        for layer_out_idx in self.rnn_out_idx:
            # Per-cell variable layout: the loop counter sits 3 slots before
            # each rnn output, the previous-iteration value 2 slots before.
            self._rnn_loop_idx.append([i - 3 for i in layer_out_idx])
            self._rnn_prev_iteration_idx.append([i - 2 for i in layer_out_idx])

        self.num_rnn_layers = len(self.rnn_out_idx)
Ejemplo n.º 16
0
def simplify_network_using_invariants(network_define_f, xlim, ylim,
                                      n_iterations):
    """Prove each invariant in turn, assuming the already-proven ones.

    Returns True iff every invariant of the defined network can be proven.
    """
    network, rnn_start_idxs, invariant_equation, *_ = network_define_f(
        xlim, ylim, n_iterations)

    # Tie all rnn iterators together: idx - idx2 = 0 for every ordered pair.
    for idx in rnn_start_idxs:
        for idx2 in rnn_start_idxs:
            if idx == idx2:
                continue
            tie_eq = MarabouCore.Equation()
            tie_eq.addAddend(1, idx)
            tie_eq.addAddend(-1, idx2)
            network.addEquation(tie_eq)

    if not isinstance(invariant_equation, list):
        invariant_equation = [invariant_equation]

    for i, inv_eq in enumerate(invariant_equation):
        if not prove_invariant2(network, [rnn_start_idxs[i]], [inv_eq]):
            print("Fail on invariant: ", i)
            return False
        # Proven -- assume it while proving the remaining invariants.
        network.addEquation(inv_eq)

    return True
Ejemplo n.º 17
0
def test_dump_query():
    """
    Checks that MarabouCore.solve can be called with all arguments and that a
    SAT query is solved correctly. Also exercises InputQuery.dump() and bound
    tightening during solving.
    """
    ipq = define_ipq(3.0)

    # No upper bound was given for variable 2, so Marabou falls back to
    # float max, which is much larger than LARGE.
    assert ipq.getUpperBound(2) > LARGE

    # Solve, then exercise dump().
    vals, stats = MarabouCore.solve(ipq, OPT, "")
    ipq.dump()

    # Marabou should return SAT values, and every assignment must respect
    # the query's bounds.
    assert not stats.hasTimedOut()
    assert len(vals) > 0
    for var in vals:
        assert ipq.getLowerBound(var) <= vals[var] <= ipq.getUpperBound(var)

    # Bound propagation should tighten bounds below LARGE, including for
    # variable 2, where no upper bound was explicitly given.
    assert ipq.getUpperBound(1) < LARGE
    assert ipq.getLowerBound(2) > -LARGE
    assert ipq.getUpperBound(2) < LARGE
Ejemplo n.º 18
0
    def evaluateWithMarabou(self,
                            inputValues,
                            filename="evaluateWithMarabou.log",
                            timeout=0):
        """
        Evaluate the network at a given point by solving with Marabou.
        Arguments:
            inputValues: list of (np arrays) representing input to network
            filename: (string) path to redirect output
            timeout: solver timeout passed through to MarabouCore.solve
        Returns:
            outputValues: (np array) representing output of network
        """
        # Map each flattened input variable index to its concrete value.
        flat_vars = np.concatenate(self.inputVars, axis=-1).ravel()
        flat_vals = np.concatenate(inputValues).ravel()
        inputDict = dict(zip(flat_vars, flat_vals))

        # Pin inputs by giving them equal lower and upper bounds.
        ipq = self.getMarabouQuery()
        for var, val in inputDict.items():
            ipq.setLowerBound(var, val)
            ipq.setUpperBound(var, val)

        outputDict = MarabouCore.solve(ipq, filename, timeout)

        # Replace each output variable index with its assigned value.
        outputValues = self.outputVars.reshape(-1).astype(np.float64)
        for i in range(len(outputValues)):
            outputValues[i] = (outputDict[0])[outputValues[i]]
        return outputValues.reshape(self.outputVars.shape)
Ejemplo n.º 19
0
def alpha_to_equation(start_idx, output_idx, initial_val, new_alpha, inv_type):
    '''
    Create an invariant equation according to the simple template \alpha*i \le R_i OR \alpha*i \ge R_i
    :param start_idx: index of the rnn iterator (i)
    :param output_idx: index of R_i
    :param initial_val: If inv_type = GE the max value of R_1 if inv_type = LE the min of R_1
    :param new_alpha: alpha to use
    :param inv_type: Marabou.Equation.GE / Marabou.Equation.LE
    :return: marabou equation
    '''
    # Need the invariant from both sides because they all depend on each other.
    eq = MarabouCore.Equation(inv_type)
    eq.addAddend(1, output_idx)  # R_i
    # The iterator coefficient's sign flips with the invariant direction.
    # NOTE(review): the GE branch using +1 (not -1) was already flagged as
    # questionable in the original; kept as-is.
    sign = -1 if inv_type == MarabouCore.Equation.LE else 1
    eq.addAddend(new_alpha * sign, start_idx)  # i
    # NOTE(review): unclear why the scalar is not sign * initial_val for the
    # LE case -- kept as-is.
    eq.setScalar(initial_val)
    return eq
Ejemplo n.º 20
0
    def solve(self, filename="", verbose=True, timeout=0):
        """
        Solve the query represented by this network.
        Arguments:
            filename: (string) path to redirect output to
            verbose: (bool) whether to print out solution
            timeout: solver timeout passed through to MarabouCore.solve
        Returns:
            vals: (dict: int->float) empty if UNSAT, else SATisfying solution
            stats: (Statistics) a Statistics object as defined in Marabou,
                    it has multiple methods that provide information related
                    to how an input query was solved.
        """
        ipq = self.getMarabouQuery()
        vals, stats = MarabouCore.solve(ipq, filename, timeout)
        if verbose:
            if stats.hasTimedOut():
                print("TO")
            elif len(vals) == 0:
                print("UNSAT")
            else:
                print("SAT")
                # Print the satisfying assignment for every input and output.
                for inputArray in self.inputVars:
                    for i in range(inputArray.size):
                        print("input {} = {}".format(
                            i, vals[inputArray.item(i)]))

                for i in range(self.outputVars.size):
                    print("output {} = {}".format(
                        i, vals[self.outputVars.item(i)]))

        return [vals, stats]
Ejemplo n.º 21
0
    def prove_adv_property(self, img_patch, out_idx_max, out_idx_compare, n):
        '''
        Prove an adversarial-style property on the rnn: the chosen output
        stays at least `small` above the compared variable.
        :param img_patch: the input img for the network (None -> a constant
            0.2 patch of length 112)
        :param out_idx_max: which index in the output should be maximum
        :param out_idx_compare: variable compared against the max output.
            NOTE(review): used directly as a Marabou variable index, unlike
            out_idx_max which is mapped through self.out_idx -- confirm this
            asymmetry is intended.
        :param n: number of iterations
        :return: result of prove_multidim_property
        '''
        if img_patch is None:
            # img_patch = np.array([0.1, 0.2, 0.3, 0.4] * 28) # 112
            img_patch = np.array([0.2] * 112)
        # Clip the patch to the maximal supported input size.
        img_patch = img_patch[:MAX_SIZE]

        properties = []
        self.set_network_description(img_patch, n)
        assert len(self.out_idx) > out_idx_max

        # out[out_idx_max] - out_idx_compare >= small
        property_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
        property_eq.addAddend(1, self.out_idx[out_idx_max])
        property_eq.addAddend(-1, out_idx_compare)
        property_eq.setScalar(small)
        properties.append(property_eq)

        # The rnn iterator variable sits 3 slots before each rnn output.
        rnn_start_idxs = [i - 3 for i in self.rnn_output_idxs]
        return prove_multidim_property(self.network, rnn_start_idxs,
                                       self.rnn_output_idxs,
                                       self.rnn_initial_values, properties)
Ejemplo n.º 22
0
    def prove_rnn_max_property(self, img_patch, rnn_out_idx, max_value, n):
        '''
        prove property on the rnn
        :param img_patch: input image for the network (None -> fixed patch)
        :param rnn_out_idx: one of rnn output idx
        :param max_value: max value for the output
        :param n: number of iterations
        :return: result of prove_multidim_property
        '''
        if img_patch is None:
            img_patch = np.array([0.1, 0.2, 0.3, 0.4] * 28)  # 112
            # img_patch = np.array([0.2] * 112)
            # img_patch = np.load('1.pt')
        # Clip the patch to the maximal supported input size.
        img_patch = img_patch[:MAX_SIZE]

        self.set_network_description(img_patch, n)

        # Property: the chosen rnn output reaches at least max_value.
        max_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
        max_eq.addAddend(1, self.rnn_output_idxs[rnn_out_idx])
        max_eq.setScalar(max_value)

        # The rnn iterator variable sits 3 slots before each rnn output.
        rnn_start_idxs = [i - 3 for i in self.rnn_output_idxs]
        algorithm = IterateAlphasSGD(self.rnn_initial_values, rnn_start_idxs,
                                     self.rnn_output_idxs)
        return prove_multidim_property(self.network, rnn_start_idxs,
                                       self.rnn_output_idxs, [max_eq],
                                       algorithm)
Ejemplo n.º 23
0
def define_negative_sum_invariant_equations(query):
    '''
    Define the equations for invariant, if needs more params should update the query with them
    and we need to define it in the calling function (not the best way but some
    :param query: marabou definition of the positive_sum network, will be changed if needed
    :return: tuple ([base equations], [step equations], [equations that hold if invariant hold])
    '''
    # Append the slack variable i at the end of the query.
    i_idx = query.getNumberOfVariables()
    query.setNumberOfVariables(i_idx + 1)
    query.setLowerBound(i_idx, 0)
    query.setUpperBound(i_idx, large)

    # base case: (s_0 f) = 0
    base_hidden_limit_eq = MarabouCore.Equation()
    base_hidden_limit_eq.addAddend(1, 1)
    base_hidden_limit_eq.setScalar(0)

    # step hypothesis: (s_i-1 f) <= i - 1 <--> i - (s_i-1 f) >= 1
    hidden_limit_eq = MarabouCore.Equation(MarabouCore.Equation.GE)
    hidden_limit_eq.addAddend(1, i_idx)  # i
    hidden_limit_eq.addAddend(-1, 1)  # s_i-1 f
    hidden_limit_eq.setScalar(1)

    # negated base invariant:
    # not(s_1 b <= 1) <--> s_1 b > 1 <--> s_1 b >= 1 + epsilon
    base_output_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    base_output_equation.addAddend(1, 2)
    base_output_equation.setScalar(1 + small)

    # negated step invariant: s_i b - i >= epsilon
    # NOTE(review): the original comment described this as negating
    # "s_i b >= i", but the code negates s_i b <= i; code kept as-is.
    output_equation = MarabouCore.Equation(MarabouCore.Equation.GE)
    output_equation.addAddend(1, 2)  # s_i b
    output_equation.addAddend(-1, i_idx)  # i
    output_equation.setScalar(small)

    # the invariant itself: s_i b <= i
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(1, 2)  # s_i b
    invariant_equation.addAddend(-1, i_idx)  # i
    invariant_equation.setScalar(0)

    base_invariant_eq = [base_hidden_limit_eq, base_output_equation]
    step_invariant_eq = [hidden_limit_eq, output_equation]
    return (base_invariant_eq, step_invariant_eq, [invariant_equation])
Ejemplo n.º 24
0
def marabou_solve_negate_eq(query):
    """Solve *query*; return True iff Marabou reports UNSAT."""
    vars1, stats1 = MarabouCore.solve(query, "", 0)
    if not vars1:
        print("UNSAT")
        return True
    # A satisfying assignment exists -> the negated property fails.
    print("SAT")
    print(vars1)
    return False
Ejemplo n.º 25
0
 def get_equation(self, rnn_model: RnnMarabouModel) -> MarabouCore.Equation:
     """Build a Marabou equation from this property's variable coefficients.

     Each (variable, coefficient) pair is mapped onto the model's input or
     output indices depending on self.on_input.

     :param rnn_model: model providing input_idx / output_idx mappings
     :return: the assembled MarabouCore.Equation
     """
     eq = MarabouCore.Equation(self.eq_type)
     for (v, c) in self.vars_coefficients:
         if self.on_input:
             eq.addAddend(c, rnn_model.input_idx[v])
         else:
             eq.addAddend(c, rnn_model.output_idx[v])
     # Bug fix: setScalar used to be inside the loop, so an empty
     # vars_coefficients list left the equation's scalar unset.
     eq.setScalar(self.scalar)
     return eq
Ejemplo n.º 26
0
def define_last_network(xlim, ylim, n_iterations):
    '''
    Build the "last_network": an RNN whose output equals its last input.
    The recurrent cell computes s_i = 0 * s_i-1 + 1 * x_i, and the output
    layer is y = s_i f.
    :param xlim: (lower, upper) bounds on the network input
    :param ylim: (lower, upper) bounds on the network output
    :param n_iterations: number of inputs / times the rnn cell will be executed
    :return: (network, [rnn output indices], invariant equation, output equation
    '''
    network = MarabouCore.InputQuery()
    network.setNumberOfVariables(1)

    # variable 0: the input x
    network.setLowerBound(0, xlim[0])
    network.setUpperBound(0, xlim[1])

    # recurrent cell: s_i = 0 * s_i-1 + 1 * x
    last_cell_idx = add_rnn_cell(network, [(0, 1)], 0, n_iterations)
    out_idx = last_cell_idx + 1

    network.setNumberOfVariables(out_idx + 1)
    # output variable y
    network.setLowerBound(out_idx, -large)
    network.setUpperBound(out_idx, large)

    # y - skf = 0
    out_eq = MarabouCore.Equation()
    out_eq.addAddend(1, out_idx)
    out_eq.addAddend(-1, last_cell_idx)
    out_eq.setScalar(0)
    network.addEquation(out_eq)

    # invariant to prove: s_i-1 f <= xlim[1]
    invariant_equation = MarabouCore.Equation(MarabouCore.Equation.LE)
    invariant_equation.addAddend(1, last_cell_idx - 2)  # s_i-1 f
    invariant_equation.setScalar(xlim[1])

    # property to verify: y <= ylim[1]
    property_eq = MarabouCore.Equation(MarabouCore.Equation.LE)
    property_eq.addAddend(1, out_idx)
    property_eq.setScalar(ylim[1])

    return network, [last_cell_idx], invariant_equation, [property_eq]
Ejemplo n.º 27
0
 def clear(self):
     '''Reset all solver state: fresh Marabou query and empty bookkeeping.'''
     self.ipq = MarabouCore.InputQuery()
     self.ipq.setNumberOfVariables(0)
     # maps string names -> integer Marabou variable indices
     self.variable_map = {}
     self.input_vars = []
     self.output_vars = []
     # log of everything asserted so far, kept for debug / double checking
     self.constraints = []
     self.num_relu = 0
Ejemplo n.º 28
0
    def solve(self, filename="", verbose=True, options=None):
        """
        Function to solve query represented by this network
        Arguments:
            filename: (string) path to redirect solver output to
            verbose: (bool) whether to print out solution after solve finishes
            options: (MarabouCore.Options) solver options; a default-constructed
                    Options object is used when None
        Returns:
            vals: (dict: int->float) empty if UNSAT, else SATisfying solution
            stats: (Statistics) a Statistics object as defined in Marabou,
                    it has multiple methods that provide information related
                    to how an input query was solved.
        """
        ipq = self.getMarabouQuery()
        # 'is None' rather than '== None': identity test, immune to __eq__ overrides
        if options is None:
            options = MarabouCore.Options()
        vals, stats = MarabouCore.solve(ipq, options, filename)
        if verbose:
            if stats.hasTimedOut():
                print("TO")
            elif len(vals) == 0:
                print("UNSAT")
            else:
                print("SAT")
                for inputVarArray in self.inputVars:
                    for inputVar in inputVarArray.flatten():
                        print("input {} = {}".format(inputVar, vals[inputVar]))
                for i in range(self.outputVars.size):
                    print("output {} = {}".format(
                        i, vals[self.outputVars.item(i)]))

        return [vals, stats]
Ejemplo n.º 29
0
def test_solve_partial_arguments():
    """
    Check that MarabouCore.solve accepts a partial argument list (filename
    omitted) and that an UNSAT query is reported correctly.
    """
    ipq = define_ipq(-2.0)
    # deliberately omit the filename argument
    vals, stats = MarabouCore.solve(ipq, OPT)
    # UNSAT means no timeout and an empty satisfying assignment
    assert not stats.hasTimedOut()
    assert len(vals) == 0
Ejemplo n.º 30
0
        def create_initial_run_equations(loop_indices, rnn_prev_iteration_idx):
            '''
            Zero the loop indices and the rnn hidden values (the previous
            iteration output) for the base case of the induction.
            :return: list of equations to add to marabou
            '''
            def make_zero_eq(var_idx):
                # equation: 1 * var_idx == 0
                eq = MarabouCore.Equation()
                eq.addAddend(1, var_idx)
                eq.setScalar(0)
                return eq

            # i == 0 for every loop counter, then s_i-1 f == 0 for every
            # hidden value of the previous iteration
            return [make_zero_eq(i) for i in loop_indices] + \
                   [make_zero_eq(idx) for idx in rnn_prev_iteration_idx]