def test_normalize_read_flag(tmpdir):
    """
    Similar to the tests in test_write_read_evaluate(), but turns the normalize flag on when creating the
    MarabouNetworkNNet objects.
    Note that in this case inputs and outputs need to be normalized.
    """
    output_filename = tmpdir.mkdir("output_network").join(
        "ACASXU_experimental_v2a_1_9_output.nnet").strpath

    nnet_object = Marabou.read_nnet(filename=NETWORK_FILENAME, normalize=True)
    nnet_object.writeNNet(output_filename)

    nnet_object_a = Marabou.read_nnet(filename=output_filename, normalize=True)

    N = 10
    for i in range(N):
        inputs = nnet_object_a.createRandomInputsForNetwork()

        output1 = nnet_object.evaluateNNet(inputs,
                                           normalize_inputs=True,
                                           normalize_outputs=True)
        output2 = nnet_object_a.evaluateNNet(inputs,
                                             normalize_inputs=True,
                                             normalize_outputs=True)
        assert (output1 == output2).all()

        without_marabou_output = nnet_object_a.evaluate(
            np.array([inputs]), useMarabou=False)[0].flatten()
        assert (output2 == without_marabou_output).all()

        with_marabou_output = nnet_object_a.evaluate(
            np.array([inputs]), useMarabou=True)[0].flatten()
        assert (abs(without_marabou_output - with_marabou_output) < TOL).all()
def createQuery(args):
    if args.input_query:
        query = Marabou.load_query(args.input_query)
        return query, None
    networkPath = args.network

    suffix = networkPath.split('.')[-1]
    if suffix == "nnet":
        network = Marabou.read_nnet(networkPath)
    elif suffix == "pb":
        network = Marabou.read_tf(networkPath)
    elif suffix == "onnx":
        network = Marabou.read_onnx(networkPath)
    else:
        print("The network must be in .pb, .nnet, or .onnx format!")
        return None, None

    if args.prop is not None:
        query = network.getMarabouQuery()
        MarabouCore.loadProperty(query, args.prop)
        return query, network

    if args.dataset == 'mnist':
        encode_mnist_linf(network, args.index, args.epsilon, args.target_label)
        return network.getMarabouQuery(), network
    elif args.dataset == 'cifar10':
        encode_cifar10_linf(network, args.index, args.epsilon, args.target_label)
        return network.getMarabouQuery(), network
    else:
        """
        ENCODE YOUR CUSTOMIZED PROPERTY HERE!
        """
        print("No property encoded!")

        return network.getMarabouQuery(), network
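
For the custom-property branch above, a hedged sketch of what such an encoding might look like; the bounds and indices are placeholders, and only setLowerBound/setUpperBound/getMarabouQuery as used elsewhere in these snippets are assumed:

def encodeCustomProperty(network):
    # Hypothetical example: constrain every input to [0, 1] and ask whether the
    # first output can reach at least 0.5 (a SAT answer yields such an input).
    for inputVar in network.inputVars[0].flatten():
        network.setLowerBound(inputVar, 0.0)
        network.setUpperBound(inputVar, 1.0)
    network.setLowerBound(network.outputVars[0][0], 0.5)
    return network.getMarabouQuery(), network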
Example #3
 def check_sat(self,
               output_filename="",
               timeout=0,
               vars_of_interest=[],
               verbose=True,
               dnc=True):
     # todo: redirect output to cwd/maraboulogs/
     if (not dnc) or (self.n_worker == 1):
         options = Marabou.createOptions(timeoutInSeconds=timeout)
     else:  # dnc
         options = Marabou.createOptions(timeoutInSeconds=timeout,
                                         dnc=True,
                                         verbosity=0,
                                         initialDivides=2,
                                         initialTimeout=120,
                                         numWorkers=self.n_worker)
         # options = Marabou.createOptions(timeoutInSeconds=timeout, dnc=True, verbosity=0,
         #                                 initialDivides=2, initialTimeout=120, numWorkers=self.n_worker,
         #                                 biasStrategy="estimate", focusLayer=1000, lookAheadPreprocessing=True)
     MarabouCore.saveQuery(self.ipq, "query_dump")
     vals, stats = MarabouCore.solve(self.ipq, options, output_filename)
     self.convert_sat_vals_to_mc_vars(vals)
     if verbose:
         self.print_results(vals, stats, vars_of_interest=vars_of_interest)
     if stats.hasTimedOut():
         return Result.TIMEOUT, self.vals_with_mc_vars, stats
     elif len(vals) == 0:
         return Result.UNSAT, self.vals_with_mc_vars, stats
     else:  # len(vals) != 0
         return Result.SAT, self.vals_with_mc_vars, stats
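
Since check_sat() dumps the query to "query_dump", the same query can later be reloaded and solved without rebuilding the network. A minimal hedged sketch, reusing only calls already present in this snippet (the timeout value is a placeholder):

# Sketch: reload a previously saved input query and solve it directly.
reloaded_ipq = Marabou.load_query("query_dump")
options = Marabou.createOptions(timeoutInSeconds=600)
vals, stats = MarabouCore.solve(reloaded_ipq, options, "")
if stats.hasTimedOut():
    print("TIMEOUT")
elif len(vals) == 0:
    print("UNSAT")
else:
    print("SAT")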
Example #4
def get_gurobi_bounds(nn_file, input_rng, layer, output_file, pattern=None):
    """
    Find bounds for the network, restricted to a given input region, at a specific layer
    :param nn_file: original network
    :param input_rng: input region property
    :param layer: layer to get the bounds of
    :param output_file: file to which Gurobi writes the bounds
    :param pattern: activation pattern of the neurons in 'layer'
    :return: lower_bounds, upper_bounds (for the requested layer)
    """
    lower_bounds = []
    upper_bounds = []

    # Update bounds according to the input region
    nn = Marabou.read_nnet(nn_file)
    for i in range(input_rng.dim):
        nn.setLowerBound(nn.inputVars[0][i], input_rng.lower_bounds[i])
        nn.setUpperBound(nn.inputVars[0][i], input_rng.upper_bounds[i])

    # Set bounds in the inner layer (if a pattern is given)
    if pattern is not None:
        first = 100*(layer-1)+5
        for j in range(len(pattern)):
            if pattern[j] == '0':  # neuron is OFF
                nn.setUpperBound(first+j, 0)
            elif pattern[j] == '1':  # neuron is ON
                nn.setLowerBound(first+j, 0)
            # else: neuron is '2' - no restriction

    nn.writeNNet("net_gurobi_comp.nnet")
    # Use Marabou to call Gurobi
    options = Marabou.createOptions(milpSolverTimeout=1, numSimulations=1)
    nn.solve(output_file, options=options)

    # Read the results from output_file
    with open(output_file) as fl:
        lines = fl.readlines()
        i = 0
        for i, l in enumerate(lines):
            if "Layer {}".format(2*layer) in l:
                break
        i += 1
        line = lines[i]
        while "Neuron" in line:
            parts = line.split()
            lower_bounds.append(float(parts[2][:-1]))
            upper_bounds.append(float(parts[4]))
            i += 1
            line = lines[i]
    return lower_bounds, upper_bounds
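
A hedged usage sketch for the helper above; the network file, the InputRange container, and the pattern string are illustrative placeholders, not part of the original code:

from collections import namedtuple

# Hypothetical container exposing the attributes get_gurobi_bounds() reads.
InputRange = namedtuple("InputRange", ["dim", "lower_bounds", "upper_bounds"])

region = InputRange(dim=5, lower_bounds=[-0.1] * 5, upper_bounds=[0.1] * 5)
# One character per neuron of the chosen layer: '0' = OFF, '1' = ON, '2' = unconstrained.
pattern = "01" + "2" * 48
lbs, ubs = get_gurobi_bounds("network.nnet", region, layer=2,
                             output_file="gurobi_bounds.txt", pattern=pattern)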
def shrink(filename, listOfBounded, assignment, candidateVariables,
           input_bounds, output_constraints, weights_softmax, window_size,
           norm, verbose):
    global howmanyentails
    essential_variables = candidateVariables.copy()
    listOfBoundedCopy = listOfBounded.copy()
    for j in candidateVariables:
        essential_variables.remove(j)
        network = Marabou.read_tf(filename,
                                  modelType='savedModel_v2',
                                  savedModelTags=['serving_default'])
        listOfBoundedCopy.append(j)
        flatListOfBounded = [y for x in listOfBoundedCopy for y in x]
        const_ranges = [
            n for n in range(len(assignment)) if n not in flatListOfBounded
        ]
        howmanyentails = howmanyentails + 1

        res = Entails(const_ranges, network, list(assignment.values()),
                      input_bounds, output_constraints, weights_softmax,
                      window_size, norm, verbose)
        network.clear()
        if len(res) == 0:
            essential_variables.append(j)
        listOfBoundedCopy.remove(j)

    return essential_variables
Example #6
    def _find_counterexample(self,
                             x: np.array,
                             y: np.array,
                             epsilon: float,
                             x_index: int = None
                             ) -> typing.Tuple[bool, int, np.array]:
        '''Finds a counterexample for an image at a given epsilon

        Args:
            x (np.array): The image
            y (np.array): Label for the image (onehot encoded)
            epsilon (float): the epsilon value
            x_index (int, optional): Index of x (for reference only). Defaults to None.

        Returns:
            tuple[bool, int, np.array]: (verified, predicted_label, counterexample)
        '''
        actual_label = np.argmax(y)
        predicted_label = actual_label
        verified = True
        counterexample = None
        for output_index in range(y.shape[0]):
            if actual_label == output_index:
                continue
            # load model, encode the transform as a marabou input query, and solve query
            network = self._load_model(self._model_path)
            network = self._transform_fn(network, x, epsilon, output_index,
                                         **self._transform_args)
            marabou_options = {
                'verbosity': DEFAULTS['marabou_verbosity'],
                **self._marabou_options
            }
            vals, stats = network.solve(
                options=Marabou.createOptions(**marabou_options),
                verbose=(self._verbosity > 3))
            # check results
            if stats.hasTimedOut():
                # TIMEOUT
                verified = False
                context = f'x_index={x_index};' if x_index is not None else ''
                assert False, f'Timeout occurred ({context}output={output_index}@epsilon={epsilon})'
            elif len(vals) == 0:
                # UNSAT
                if self._verbosity > 2:
                    print(
                        f'image:{x_index};output:{output_index}@epsilon:{epsilon} (UNSAT)'
                    )
                continue
            else:
                # SAT (counterexample found)
                if self._verbosity > 2:
                    print(
                        f'image:{x_index};output:{output_index}@epsilon:{epsilon} (SAT)'
                    )
                counterexample = np.array([
                    vals[i] for i in range(self.n_pixels)
                ]).reshape(self.image_shape)
                predicted_label = output_index
                verified = False
                break
        return verified, predicted_label, counterexample
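
The transform passed in as self._transform_fn is not shown here. Below is a hedged sketch of one possible targeted L-infinity encoding, built only from calls that appear elsewhere in these examples; the function name and its extra true_index argument are hypothetical:

from maraboupy import MarabouUtils


def linf_targeted_transform(network, x, epsilon, output_index, true_index):
    # Hypothetical sketch (not the class's actual transform): bound each input
    # variable inside an L-infinity ball of radius epsilon around the image x.
    for var, value in zip(network.inputVars[0].flatten(), x.flatten()):
        network.setLowerBound(var, value - epsilon)
        network.setUpperBound(var, value + epsilon)
    # Require the target logit to be at least the true-label logit:
    # encodes 1*y_true - 1*y_target <= 0, so SAT gives a targeted counterexample.
    outputs = network.outputVars[0]
    MarabouUtils.addInequality(network,
                               [outputs[true_index], outputs[output_index]],
                               [1, -1], 0)
    return network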
Example #7
def create_network(filename):
    output_op_name = "model/pi/add"
    input_op_names = ["input/Ob"]
    network = Marabou.read_tf(
        filename, inputName=input_op_names, outputName=output_op_name
    )  #,savedModel = True,outputName = "save_1/restore_all", savedModelTags=[tag_constants.SERVING] )
    return network, input_op_names, output_op_name
Example #8
def create_network(filename):

    input_op_names = ["input"]
    output_op_name = "y_out"

    network = Marabou.read_tf(filename, inputName=input_op_names,outputName=output_op_name)
    return network, input_op_names, output_op_name
def create_network(filename, k):
    output_op_name = "model/pi/add"

    input_op_names = ["input/Ob"]
    network = Marabou.read_tf_k_steps(filename,
                                      k,
                                      inputName=input_op_names,
                                      outputName=output_op_name)
    return network, input_op_names, output_op_name
Example #10
    def _load_network(self) -> Marabou.MarabouNetwork:
        '''loads the network as a MarabouNetwork object

        Returns:
            Marabou.MarabouNetwork: the MarabouNetwork object instance.
        '''
        valid_exts = ('.nnet', '', '.pb', '.onnx')
        ext = get_file_extension(self._network_path)
        assert ext in valid_exts, 'Model must be in nnet, pb, or onnx format'
        if ext == '.nnet':
            return Marabou.read_nnet(self._network_path,
                                     **self._network_options)
        elif ext in ('', '.pb'):
            return Marabou.read_tf(self._network_path, **self._network_options)
        elif ext == '.onnx':
            return Marabou.read_onnx(self._network_path,
                                     **self._network_options)
        return None
def create_network(filename, k):
    input_op_names = ["actor/InputData/X"]
    output_op_name = "actor/FullyConnected_4/BiasAdd"

    network = Marabou.read_tf_k_steps(filename,
                                      k,
                                      inputName=input_op_names,
                                      outputName=output_op_name)
    return network, input_op_names, output_op_name
def test_write_read_evaluate(tmpdir):
    """
    Test writeNNet by writing an nnet into a file, reading from that file, and comparing by evaluating on
    random inputs.
    """
    output_filename = tmpdir.mkdir("output_network").join(
        "ACASXU_experimental_v2a_1_9_output.nnet").strpath

    nnet_object = Marabou.read_nnet(filename=NETWORK_FILENAME)
    nnet_object.writeNNet(output_filename)

    nnet_object_a = Marabou.read_nnet(filename=output_filename)

    N = 10
    for i in range(N):
        inputs = nnet_object_a.createRandomInputsForNetwork()

        output1 = nnet_object.evaluateNNet(inputs,
                                           normalize_inputs=False,
                                           normalize_outputs=False)
        output2 = nnet_object_a.evaluateNNet(inputs,
                                             normalize_inputs=False,
                                             normalize_outputs=False)
        assert (output1 == output2).all()

        # Compare evaluation with and without Marabou
        without_marabou_output = nnet_object_a.evaluate(
            np.array([inputs]), useMarabou=False)[0].flatten()
        with_marabou_output = nnet_object_a.evaluate(
            np.array([inputs]), useMarabou=True)[0].flatten()

        # Assert that all of the above agree up to TOL
        assert (output2 == without_marabou_output).all()
        assert (abs(without_marabou_output - with_marabou_output) < TOL).all()

        # Adding input and output normalization
        output1 = nnet_object.evaluateNNet(inputs,
                                           normalize_inputs=True,
                                           normalize_outputs=True)
        output2 = nnet_object_a.evaluateNNet(inputs,
                                             normalize_inputs=True,
                                             normalize_outputs=True)
        assert (output1 == output2).all()
Example #13
 def solve(self, timeout=default_timeout, dnc=False):
     options = Marabou.createOptions(timeoutInSeconds=timeout, verbosity=self.__marabou_verbosity)
     vals, stats = self.network.solve(verbose=bool(self.__marabou_verbosity), options=options)
     assignment = ([], [])
     if len(vals) > 0:
         for i in range(self.get_num_inputs()):
             assignment[0].append(vals[self.get_input_var(i)])
         for i in range(self.get_num_outputs()):
             assignment[1].append(vals[self.get_output_var(i)])
     return assignment, stats
Example #14
 def __init__(self, network_path, lbs=None, ubs=None, marabou_verbosity=0, marabou_logdir=None):
     self.__original_nnet = Marabou.read_nnet(network_path)
     self.network = copy.deepcopy(self.__original_nnet)
     if not(lbs is None and ubs is None):
         assert(len(lbs) == len(ubs))
         assert(len(lbs) == self.get_num_inputs())
         self.set_lower_bounds(lbs)
         self.set_upper_bounds(ubs)
     self.__marabou_verbosity = marabou_verbosity
     self.__marabou_logfile = os.path.join(marabou_logdir, 'marabou.log') if marabou_logdir else None
Example #15
    def _load_model(self, model_path: str) -> Marabou.MarabouNetwork:
        '''Loads a tensorflow, nnet, or onnx model as a MarabouNetwork

        Args:
            model_path (str): Path to the verification model

        Returns:
            MarabouNetwork: the MarabouNetwork object
        '''
        valid_exts = ('.nnet', '', '.pb', '.h5', '.hdf5', '.onnx')
        ext = _get_file_extension(model_path)
        assert ext in valid_exts, 'Model must be .nnet, .pb, .h5, or .onnx'
        if ext == '.nnet':
            return Marabou.read_nnet(model_path, **self._model_args)
        elif ext in ('', '.pb', '.h5', '.hdf5'):
            return Marabou.read_tf(model_path, **self._model_args)
        elif ext == '.onnx':
            return Marabou.read_onnx(model_path, **self._model_args)
        return None
Example #16
    def evaluate(self, x: Iterable[Number]) -> Iterable[Number]:
        '''Makes a prediction

        Args:
            x (Iterable[Number]): The input (x)

        Returns:
            Iterable[Number]: The network's prediction.
        '''
        options = Marabou.createOptions(verbosity=bool(
            self._marabou_verbosity > 1))
        return self.network.evaluate([x], options=options)[0]
def test_bound_getters():
    """
    Test getVariable, getLowerBound, getUpperBound
    """
    nnet_object = Marabou.read_nnet(filename=NETWORK_FILENAME, normalize=False)

    ipq = nnet_object.getMarabouQuery()

    num_input_vars = ipq.getNumInputVariables()
    assert num_input_vars == nnet_object.inputSize

    # Test getVariable and numberOfVariables
    assert ipq.getNumberOfVariables() == nnet_object.numberOfVariables()
    assert nnet_object.getVariable(1, 1, b=True) == num_input_vars + 1
    assert nnet_object.getVariable(
        1, 1, b=False) == num_input_vars + nnet_object.layerSizes[1] + 1

    # Testing the variable and bound getters on a random hidden node
    random_hidden_layer = np.random.randint(1, nnet_object.numLayers)
    random_node = np.random.randint(
        0, nnet_object.layerSizes[random_hidden_layer])
    var_b = nnet_object.getVariable(random_hidden_layer, random_node)
    var_f = nnet_object.getVariable(random_hidden_layer, random_node, b=False)
    assert var_b == nnet_object.nodeTo_b(random_hidden_layer, random_node)
    assert var_f == nnet_object.nodeTo_f(random_hidden_layer, random_node)
    assert nnet_object.getLowerBound(random_hidden_layer, random_node,
                                     b=False) == 0
    if not nnet_object.upperBoundExists(var_b):
        assert not nnet_object.upperBoundExists(var_f)
        assert nnet_object.getUpperBound(
            random_hidden_layer, random_node, b=False) is None
        assert nnet_object.getUpperBound(
            random_hidden_layer, random_node, b=True) is None
    if not nnet_object.lowerBoundExists(var_b):
        assert nnet_object.getLowerBound(random_hidden_layer,
                                         random_node) is None

    # Test getLowerBoundsForLayer and getUpperBoundsForLayer on the input layer
    input_lower_bounds = [
        ipq.getLowerBound(var) for var in range(num_input_vars)
    ]
    assert input_lower_bounds == nnet_object.getLowerBoundsForLayer(0, b=False)
    input_upper_bounds = [
        ipq.getUpperBound(var) for var in range(num_input_vars)
    ]
    assert input_upper_bounds == nnet_object.getUpperBoundsForLayer(0, b=False)

    # Test nnet_object.getBoundsForLayer and that getVariable always returns the f variable for layer == 0
    assert nnet_object.getBoundsForLayer(0) == (input_lower_bounds,
                                                input_upper_bounds)
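
The same getters give a quick way to dump every stored bound; a short sketch, assuming the per-layer getters behave for hidden layers as they do for the input layer above:

# Sketch: print the stored bounds for each layer of the network read above.
nnet = Marabou.read_nnet(filename=NETWORK_FILENAME, normalize=False)
for layer in range(nnet.numLayers):
    lower_bounds, upper_bounds = nnet.getBoundsForLayer(layer)
    print("layer", layer, "lower:", lower_bounds)
    print("layer", layer, "upper:", upper_bounds)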
Example #18
def split_check_unsat(network_PATH,
                      property_path,
                      split_level,
                      input,
                      print=True):
    '''
    Splits the network and checks whether the two parts fulfill our requirements
    (n1: the negated activation pattern is UNSAT, n2: the property is UNSAT).
    '''
    #create network
    network = Marabou.MarabouNetworkNNet(network_PATH)
    # split network
    n1, n2 = split_network_marabou(network, split_level)
    sat1, sat2 = check_split_unsat(n1, n2, property_path, input)
    return sat1 == sat2
Example #19
def create_pattern(nnet_file1, inp):
    """
    Create an activation pattern from the first part of the net, given an input point.
    :param nnet_file1: part 1 of the original nnet
    :param inp: input point
    :return: pattern (string of 0s and 1s)
    """
    prop = ""
    nn = Marabou.read_nnet(nnet_file1)
    output = nn.evaluate([inp])[0]
    for i, j in enumerate(output):
        if j > 0:
            prop += "1"
        else:
            prop += "0"
    return prop
Example #20
def load_actor_network(frozen_actor_path):
    """
    loads the actor network from saved file

    arguments:
        frozen_actor_path: a string with the path of the frozen network
    returns:
        network: the network loaded in Marabou
        network_inputs: a list of variable indices which correspond to the neural network inputs
        network_outputs: a list of variable indices which correspond to the neural network outputs
    """
    network = Marabou.read_tf(frozen_actor_path, savedModel=False)
    #   inputName=['actor_state_input'])
    #   outputName='sequential/actor_outputs/BiasAdd')
    network_inputs = network.inputVars[0][0]
    network_outputs = network.outputVars[0]
    return (network, network_inputs, network_outputs)
Example #21
def try_to_find_input_comp_check(ang, angvel, uk, file):
	# This file should simplify the check
	netcheck = Marabou.read_nnet(file)
	# cos th: we assume this to be always 1.0
	netcheck.setLowerBound(netcheck.inputVars[0][0], 1.0)
	netcheck.setUpperBound(netcheck.inputVars[0][0], 1.0)
	# sin th: we assume that the small angle approximation holds
	netcheck.setLowerBound(netcheck.inputVars[0][1], ang)
	netcheck.setUpperBound(netcheck.inputVars[0][1], ang)
	# angular velocity:
	netcheck.setLowerBound(netcheck.inputVars[0][2], angvel)
	netcheck.setUpperBound(netcheck.inputVars[0][2], angvel)
	# input: is normalized in between 1 and -1:
	netcheck.setLowerBound(netcheck.inputVars[0][3], uk)
	netcheck.setUpperBound(netcheck.inputVars[0][3], uk)

	return netcheck
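
A hedged usage sketch: with every input pinned to a point, the returned network can be solved directly once an output property is added. The file name and the output bound below are placeholders:

import numpy as np

netcheck = try_to_find_input_comp_check(ang=0.05, angvel=0.0, uk=0.1, file="controller.nnet")
first_output = np.array(netcheck.outputVars).flatten()[0]
netcheck.setLowerBound(first_output, 0.0)  # placeholder output property
result = netcheck.solve()  # (vals, stats) on older Marabou, (exitCode, vals, stats) on newer
vals = result[-2]          # the satisfying assignment, if any
print("SAT" if len(vals) > 0 else "UNSAT")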
    def evaluateEpsilon(self, epsilon, network, prediction):
        outputVars = network.outputVars
        abs_epsilons = list()
        preds = list()
        predIndices = np.flip(np.argsort(prediction, axis=1), axis=1)
        for i in range(outputVars.shape[0]):
            preds.append((predIndices[i][0], predIndices[i][1]))
        n, m = network.epsilons.shape
        print(n, m)
        for i in range(n):
            for j in range(m):
                if j in list(chain.from_iterable(preds)):
                    epsilon_var = network.epsilons[i][j]
                    network.setUpperBound(epsilon_var, epsilon)
                    network.setLowerBound(epsilon_var, -epsilon)
                    abs_epsilon_var = self.epsilonABS(network, epsilon_var)
                    abs_epsilons.append(abs_epsilon_var)
                else:
                    epsilon_var = network.epsilons[i][j]
                    network.setUpperBound(epsilon_var, 0)
                    network.setLowerBound(epsilon_var, 0)

        # Constrain the sum of the |epsilon| variables (the total perturbation budget)
        # to be at most epsilon.
        e = MarabouUtils.Equation(
            EquationType=MarabouUtils.MarabouCore.Equation.LE)
        for i in range(len(abs_epsilons)):
            e.addAddend(1, abs_epsilons[i])
        e.setScalar(epsilon)
        network.addEquation(e)

        # For each sample, require the runner-up logit to be at least the predicted logit:
        # encodes 1*out[pred] - 1*out[runner_up] <= 0 (a misclassification constraint).
        for i in range(outputVars.shape[0]):
            MarabouUtils.addInequality(
                network,
                [outputVars[i][preds[i][0]], outputVars[i][preds[i][1]]],
                [1, -1], 0)

        options = Marabou.createOptions(numWorkers=6, dnc=False)
        vals, stats = network.solve(verbose=False, options=options)
        newOut = predIndices[:, 1]
        if len(vals) > 0:
            return sat, (vals, stats), newOut
        else:
            return unsat, (vals, stats), newOut
Example #23
    def solve(
        self
    ) -> Tuple[Tuple[List[Number], List[Number]], MarabouCore.Statistics]:
        '''Solves the input query encoded in the MarabouNetwork object

        Returns:
            Tuple[Tuple[List[Number], List[Number]], MarabouCore.Statistics]: tuple containing a counterexample and marabou statistics.
        '''
        options = {
            'timeoutInSeconds': self._marabou_timeout,
            'verbosity': self._marabou_verbosity,
            **self._marabou_options
        }
        vals, stats = self.network.solve(
            verbose=bool(self._marabou_verbosity),
            options=Marabou.createOptions(**options))
        assignment = ([], [])
        if len(vals) > 0:
            for i in range(self.num_inputs):
                assignment[0].append(vals[self.get_input_var(i)])
            for i in range(self.num_outputs):
                assignment[1].append(vals[self.get_output_var(i)])
        return assignment, stats
  

from maraboupy import Marabou
import numpy as np

# %%
# This network has inputs x0, x1, and was trained to create outputs that approximate
# y0 = abs(x0) + abs(x1), y1 = x0^2 + x1^2
filename = "../../resources/tf/frozen_graph/fc1.pb"
network = Marabou.read_tf(filename)

# %%
# Or, you can specify the operation names of the input and output operations.
# The default chooses the placeholder operations as input and the last operation as output
inputNames = ['Placeholder']
outputName = 'y_out'
network = Marabou.read_tf(filename=filename,
                          inputNames=inputNames,
                          outputName=outputName)

# %%
# Get the input and output variable numbers; [0] since first dimension is batch size
inputVars = network.inputVars[0][0]
outputVars = network.outputVars[0]
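
# %%
# (Hedged continuation, not part of the original snippet.) With the variable numbers in
# hand, bounds can be placed on the inputs, an output bound added as the query, and the
# network solved; the numeric values here are placeholders.
network.setLowerBound(inputVars[0], -1.0)
network.setUpperBound(inputVars[0], 1.0)
network.setLowerBound(inputVars[1], -1.0)
network.setUpperBound(inputVars[1], 1.0)
network.setLowerBound(outputVars[0], 10.0)  # can y0 reach 10 on this input box?
result = network.solve()  # returns the assignment and statistics (newer versions also prepend an exit code)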
Example #25
import sys
sys.path.append("/cs/labs/guykatz/yoni_mantzur/marabou")

from maraboupy import MarabouNetwork
from maraboupy import MarabouUtils, MarabouCore, Marabou
import numpy as np

network = Marabou.read_tf(
    '/cs/labs/guykatz/yoni_mantzur/marabou/resources/tf/frozen_graph/sigmoids/mnist_20.pb'
)  # type: MarabouNetwork.MarabouNetwork

# Get the input and output variable numbers; [0] since first dimension is batch size
inputVars = network.inputVars[0][0]
outputVars = network.outputVars[0]

large = 100.0
delta = 0.3

x = [
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
    0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.32941177, 0.7254902,
    0.62352943, 0.5921569, 0.23529412, 0.14117648, 0., 0., 0., 0., 0., 0., 0.,
Example #26
import numpy as np

# %%
# Path to Marabou folder if you did not export it

# sys.path.append('/home/USER/git/Marabou')
from maraboupy import Marabou
from maraboupy.MarabouCore import StatisticsUnsignedAttribute

# %%
# Path to NNet file
nnetFile = "../../src/input_parsers/acas_example/ACASXU_run2a_1_1_tiny_2.nnet"

# %%
# Load the network from NNet file, and set a lower bound on first output variable
net1 = Marabou.read_nnet(nnetFile)
net1.setLowerBound(net1.outputVars[0][0], .5)

# %%
# Solve Marabou query
exitCode, vals1, stats1 = net1.solve()


# %%
# Example statistics
stats1.getUnsignedAttribute(StatisticsUnsignedAttribute.NUM_SPLITS)
stats1.getTotalTimeInMicro()


# %%
# Eval example
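# %%
# (Hedged sketch; the original evaluation code is truncated here.) Networks can also be
# evaluated on concrete inputs. Re-reading the file avoids the output bound added above;
# the input point is a placeholder.
net2 = Marabou.read_nnet(nnetFile)
inputPoint = np.array([[0.0, 0.0, 0.0, 0.0, 0.0]])
print(net2.evaluate(inputPoint, useMarabou=False))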
Example #27
'''
Top contributors (to current version):
  - Kyle Julian
  
This file is part of the Marabou project.
Copyright (c) 2017-2019 by the authors listed in the file AUTHORS
in the top-level source directory) and their institutional affiliations.
All rights reserved. See the file COPYING in the top-level source
directory for licensing information.
'''

from maraboupy import Marabou
import numpy as np

# %%
# Set the Marabou option to restrict printing
options = Marabou.createOptions(verbosity=0)

# %%
# Fully-connected network example
# -------------------------------
#
# This network has inputs x0, x1, and was trained to create outputs that approximate
# y0 = abs(x0) + abs(x1), y1 = x0^2 + x1^2
print("Fully Connected Network Example")
filename = "../../resources/onnx/fc1.onnx"
network = Marabou.read_onnx(filename)

# %%
# Or, you can specify the operation names of the input and output operations.
# The default chooses the placeholder operations as inputs and the last operation as output
inputName = 'Placeholder:0'
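
# %%
# (Hedged continuation; the original snippet is truncated above.) As with the TF example,
# the network already read with Marabou.read_onnx(filename) can be queried by bounding
# its variables; the indices and values here are placeholders.
inputVars = network.inputVars[0].flatten()
outputVars = network.outputVars[0].flatten()
network.setLowerBound(inputVars[0], -1.0)
network.setUpperBound(inputVars[0], 1.0)
network.setLowerBound(inputVars[1], -1.0)
network.setUpperBound(inputVars[1], 1.0)
network.setLowerBound(outputVars[0], 10.0)
result = network.solve(options=options)  # options restricts printing (set above)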
Example #28
import numpy as np
from maraboupy import MarabouUtils
from maraboupy import Marabou
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--model',
                    default='ACASXU_2_9',
                    help='the name of the model')
args = parser.parse_args()

model_name = args.model
file_name = './ProtobufNetworks/{}.pb'.format(model_name)
# nnet_file_name = "../Marabou/resources/nnet/acasxu/ACASXU_experimental_v2a_2_9.nnet"

net1 = Marabou.read_tf(file_name)
# net2 = Marabou.read_nnet(nnet_file_name)

# s = 0
# for i in range(1000):
#     a = 2*np.random.random_sample((1,5))-1
#     print(a)
#     b = net1.evaluate(a)
#     c = net2.evaluate(a)
#     print(b-c)
#     s = s + np.linalg.norm(b-c, 1)
# # 0.0001767427448647568
# print(s)

# Bounds for input 0: [ -0.3284228772, 0.6798577687 ]
# Bounds for input 1: [ -0.5000000551, 0.5000000551 ]
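
# (Hedged sketch, not part of the original script.) The bounds listed above can be applied
# to the corresponding input variables before querying the network:
inputVars = net1.inputVars[0].flatten()
net1.setLowerBound(inputVars[0], -0.3284228772)
net1.setUpperBound(inputVars[0], 0.6798577687)
net1.setLowerBound(inputVars[1], -0.5000000551)
net1.setUpperBound(inputVars[1], 0.5000000551)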
Example #29
    def verify(self, network: networks.NeuralNetwork, prop: Property) -> (bool, typing.Optional[Tensor.Tensor]):
        """
        Verify that the neural network of interest satisfies the property given as argument
        using the Marabou verification tool.

        Parameters
        ----------
        network : NeuralNetwork
            The neural network to verify.
        prop : Property
            The property which the neural network must satisfy.

        Returns
        ----------
        (bool, Optional[Tensor])
            True and None if the neural network satisfies the property, False and the counterexample otherwise.

        """
        if isinstance(prop, SMTLIBProperty):
            targeted, bounds, target = utilities.parse_linf_robustness_smtlib(prop.smtlib_path)
        elif isinstance(prop, LocalRobustnessProperty):
            targeted = prop.targeted
            target = prop.target
            bounds = []
            for i in range(len(prop.data)):

                if prop.data[i] + prop.epsilon > prop.bounds[i][1]:
                    ub = prop.bounds[i][1]
                else:
                    ub = prop.data[i] + prop.epsilon

                if prop.data[i] - prop.epsilon < prop.bounds[i][0]:
                    lb = prop.bounds[i][0]
                else:
                    lb = prop.data[i] - prop.epsilon

                bounds.append((lb, ub))
        else:
            raise NotImplementedError

        if not targeted:
            raise NotImplementedError

        onnx_rep = cv.ONNXConverter().from_neural_network(network)
        onnx.save_model(onnx_rep.onnx_network, "temp/onnx_network.onnx")

        marabou_onnx_net = Marabou.read_onnx("temp/onnx_network.onnx")
        os.remove("temp/onnx_network.onnx")
        input_vars = marabou_onnx_net.inputVars[0][0]
        output_vars = marabou_onnx_net.outputVars

        assert(len(bounds) == len(input_vars))

        for i in range(len(input_vars)):
            marabou_onnx_net.setLowerBound(input_vars[i], bounds[i][0])
            marabou_onnx_net.setUpperBound(input_vars[i], bounds[i][1])

        for i in range(len(output_vars)):
            if i != target:
                MarabouUtils.addInequality(marabou_onnx_net, [output_vars[i], output_vars[target]], [1, -1], 0)

        options = MarabouCore.Options()
        # options._verbosity = 2

        vals, stats = marabou_onnx_net.solve(options=options)

        counterexample = None
        if not vals:
            sat = False
        else:
            sat = True
            counterexample = [val for val in vals.values()]
            counterexample = np.array(counterexample)

        return sat, counterexample
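
Note that vals maps every Marabou variable, not only the inputs, so the counterexample returned above lives in the full variable space. A small hedged helper for recovering the counterexample in input space (the function name is hypothetical):

def extract_input_counterexample(vals, input_vars):
    # Hypothetical helper: restrict a SAT assignment to the input variables only.
    if not vals:
        return None
    return np.array([vals[var] for var in input_vars])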
Example #30
def create_network():

    true_stdout = sys.stdout

    run_number = str(int(np.ceil(np.random.rand() * 10000)))
    fnumber = "3332"
    fname = "graph_def_"  #real_controller_2_steps_"
    nsteps = 4
    fprefix = "/Users/Chelsea/Dropbox/AAHAA/src/OverApprox/nnet_files"
    frozen_graph = os.path.join(fprefix, fname + fnumber + ".pb")
    meta_data = os.path.join(fprefix, "meta_data_" + fnumber + ".txt")

    # make path in which to store outputs
    network_dir = '/Users/Chelsea/Dropbox/AAHAA/src/OverApprox/MarabouLogs/network_' + fnumber
    if not os.path.exists(network_dir):
        os.mkdir(network_dir)

    marabou_log_dir = os.path.join(network_dir,
                                   'run_' + run_number + '_marabou.log')
    print(marabou_log_dir)
    if os.path.exists(marabou_log_dir):  # don't overwrite old data!!!
        raise FileExistsError

    # redirect to file
    peripheral_logfile = os.path.join(network_dir,
                                      'run_' + run_number + '_peripheral.log')
    sys.stdout = open(peripheral_logfile, 'w')

    output_op_name, inputs, outputs = read_inout_metadata(meta_data)

    network = Marabou.read_tf(frozen_graph, outputName=output_op_name)
    network.name = frozen_graph[0:-3]  # strip off .pb at the end

    inputVars = network.inputVars
    print("inputVars:", inputVars)
    outputVars = network.outputVars
    print("outputVars: ", outputVars)
    outputVarList = list(np.array(outputVars).flatten())

    d1, d2 = map_inputs_fromVarMap(
        varMapOpstoNames(network.varMap),
        inputs)  # for use with other networks that have not been condensed

    # set adjustable vars for DnC
    network.adjustable_inputs = get_adjustable_vars(
        ["initial_values/theta_0", "initial_values/theta_dot_0"], d2)

    network.dependencies = get_dependencies(nsteps)

    # set bounds on outputs
    bounds = set_bounds(network, d1, d2, bounds_2_5, network_dir, run_number)
    network.bounds = bounds
    network.nsteps = nsteps

    # make sure all lower bounds are less than all upper bounds
    check_bounds(network.upperBounds, network.lowerBounds)

    sys.stdout = true_stdout
    network.peripheral_logfile = peripheral_logfile

    return network, peripheral_logfile, marabou_log_dir