Example #1
def encodeOneHotExample():
    invars = encode_inputs([-1, 0, 1], [-1, 0, 1])
    outs, vars, constraints = encode_one_hot(invars, 1, '')

    vars = [invars, vars, outs]
    constraints = [constraints]

    pretty_print(vars, constraints)

    print('\n### now with interval arithmetic ###')
    interval_arithmetic(constraints)
    pretty_print(vars, constraints)
Example #2
def encodeMaxpoolExample():
    invars = encode_inputs([0, 1, 2], [1, 2, 3])
    outs, deltas, ineqs = encode_maxpool_layer(invars, 1, '')

    vars = [invars, deltas, outs]
    constraints = [ineqs]

    pretty_print(vars, constraints)

    print('\n### now with interval arithmetic ###')
    interval_arithmetic(constraints)
    pretty_print(vars, constraints)
Example #3
def encodeEquivalenceExample():
    inputs = [-3 / 2, 0]

    weights1 = [[1, 4], [2, 5], [3, 6]]
    weights2 = [[1, 5], [2, 5], [3, 6]]

    layers1 = [('relu', 2, weights1)]
    layers2 = [('relu', 2, weights2)]

    vars, constraints = encode_equivalence(layers1, layers2, inputs, inputs)

    pretty_print(vars, constraints)
    print('\n### now smtlib ###\n')
    print(print_to_smtlib(vars, constraints))
Example #4
def encodeRankingExample():
    invars = encode_inputs([-1, 0, 1], [-1, 0, 1])
    permute_matrix, vars, constraints = encode_ranking_layer(invars, 1, '')

    vars = [invars, vars, permute_matrix]
    constraints = [constraints]

    pretty_print(vars, constraints)

    print('\n### now with interval arithmetic ###')
    interval_arithmetic(constraints)
    pretty_print(vars, constraints)

    return vars, constraints
Example #5
def exampleEncodeSimpleCancer():
    # encode simple cancer classifier
    # result for given input should be 19.67078
    kl = KerasLoader()
    kl.load('ExampleNNs/cancer_simple_lin.h5')

    inputs = [8, 10, 10, 8, 6, 9, 3, 10, 10]
    layers = kl.getHiddenLayers()

    vars, constraints = encodeNN(layers, inputs, inputs, '')

    interval_arithmetic(constraints)

    pretty_print(vars, constraints)
Example #6
def exampleEncodeCancer(with_interval_arithmetic=True):
    # smallest input values over the whole training data
    input_los = [
        -2.019404, -2.272988, -1.977589, -1.426379, -3.176344, -1.664312,
        -1.125696, -1.262871, -2.738225, -1.865718, -1.024522, -1.569514,
        -1.016081, -0.6933525, -1.862462, -1.304206, -1.012913, -1.977069,
        -1.544220, -1.080050, -1.704360, -2.218398, -1.673608, -1.188201,
        -2.711807, -1.468356, -1.341360, -1.754014, -2.128278, -1.598903
    ]
    # largest input values over the whole training data
    input_his = [
        3.963628, 3.528104, 3.980919, 5.163006, 3.503046, 4.125777, 4.366097,
        3.955644, 4.496561, 5.105021, 8.697088, 6.788612, 9.410281, 10.52718,
        5.747718, 6.308377, 11.73186, 6.984494, 4.999672, 10.02360, 4.049783,
        3.938555, 4.261315, 5.758096, 3.988374, 5.270909, 4.936910, 2.695096,
        5.934052, 6.968987
    ]
    input_malign_zero_lo = input_los
    # the highest score in feature 3 for benign data is 0.945520 -> set the input vector higher than that
    # -> if the NN works correctly, this input should be classified as malign (label = 1)
    input_malign_zero_lo[0] = 1.1
    input_malign_zero_lo[3] = 1.25
    input_malign_zero_lo[4] = 2.96
    input_malign_zero_lo[7] = 1.45
    input_malign_zero_lo[20] = 0.9
    vars, constraints = encode_from_file('ExampleNNs/cancer_lin.h5',
                                         input_malign_zero_lo, input_his)

    if with_interval_arithmetic:
        interval_arithmetic(constraints)

    pretty_print(vars, constraints)

    print('\n### smtlib ###\n')
    # to prove the NN correct, manually insert the constraint below into the
    # printed SMT-LIB script (see the sketch after this example):
    # (assert (<= x_3_0 0))
    # if this can be satisfied, a counterexample to correctness has been found
    print(print_to_smtlib(vars, constraints))

    return vars, constraints
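
The manual step described in the comments above can also be scripted. A minimal sketch, assuming print_to_smtlib returns a standard SMT-LIB 2 script containing a (check-sat) command; the helper name write_correctness_query and the output path are hypothetical and not part of the original code:

def write_correctness_query(vars, constraints, path='cancer_lin_query.smt2'):
    # hypothetical helper: splice the correctness assertion into the generated script
    script = print_to_smtlib(vars, constraints)
    # assumption: the script ends with a (check-sat) command; insert the
    # assertion right before it, so SAT yields a counterexample to correctness
    script = script.replace('(check-sat)', '(assert (<= x_3_0 0))\n(check-sat)', 1)
    with open(path, 'w') as f:
        f.write(script)
    return path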
Example #7
    def check_equivalence_layer(self, layer_idx):
        opt_vars = []
        opt_constrs = []
        if layer_idx == 0:
            opt_vars += self.input_layer.get_outvars()[:]
        else:
            a_outs = self.a_layers[layer_idx - 1].get_outvars()[:]
            b_outs = self.b_layers[layer_idx - 1].get_outvars()[:]
            opt_vars += a_outs + b_outs

            # at this stage we assume the previous layers to be equivalent
            for avar, bvar in zip(a_outs, b_outs):
                opt_constrs += [Linear(avar, bvar)]

        bounds = []

        for i, (a_var, a_constr, b_var, b_constr) in enumerate(
                zip(self.a_layers[layer_idx].get_optimization_vars(),
                    self.a_layers[layer_idx].get_optimization_constraints(),
                    self.b_layers[layer_idx].get_optimization_vars(),
                    self.b_layers[layer_idx].get_optimization_constraints())):
            diff = Variable(layer_idx, i, 'E', 'diff')
            diff_constr = Linear(Sum([a_var, Neg(b_var)]), diff)

            if i == 1:
                pretty_print(opt_vars + [a_var, b_var, diff],
                             opt_constrs + [a_constr, b_constr, diff_constr])

            lb, ub = self.optimize_variable(
                diff, opt_vars + [a_var, b_var, diff],
                opt_constrs + [a_constr, b_constr, diff_constr])
            diff.update_bounds(lb, ub)

            bounds.append((lb, ub))

        return bounds
Example #8
    def pretty_print(self):
        pretty_print(self.get_vars(), self.get_constraints())