def forward_symbolic(self, var, inputs):
    # Symbolic forward pass: rebuild the 3-layer tanh network as dReal
    # expressions over the symbolic inputs so the result can be handed
    # to the SMT solver.
    layer1_weights, layer2_weights, layer3_weights = var

    # Hidden layer 1: tanh(W1 @ inputs)
    layer1 = []
    for i in range(self.linear1.weight.size(0)):
        ex = 0
        for j in range(self.linear1.weight.size(1)):
            ex += inputs[j] * layer1_weights[i][j]
        layer1.append(dreal.tanh(ex))

    # Hidden layer 2: tanh(W2 @ layer1)
    layer2 = []
    for i in range(self.linear2.weight.size(0)):
        ex = 0
        for j in range(self.linear2.weight.size(1)):
            ex += layer1[j] * layer2_weights[i][j]
        layer2.append(dreal.tanh(ex))

    # Output layer: raw logits (no activation)
    layer3 = []
    for i in range(self.linear3.weight.size(0)):
        ex = 0
        for j in range(self.linear3.weight.size(1)):
            ex += layer2[j] * layer3_weights[i][j]
        layer3.append(ex)

    # Two-class softmax written explicitly with dReal's exp
    softmax = [
        dreal.exp(layer3[0]) /
        (dreal.exp(layer3[0]) + dreal.exp(layer3[1])),
        dreal.exp(layer3[1]) /
        (dreal.exp(layer3[0]) + dreal.exp(layer3[1]))
    ]
    return softmax
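A minimal sketch of how this symbolic forward pass might be driven. The `model` instance, the way the weights are packed into `var`, and the dReal variable names are assumptions for illustration, not part of the original snippet:

import dreal

# Hypothetical driver: one dReal Variable per network input, and the trained
# weights unpacked into nested Python lists so the [i][j] indexing above works.
sym_inputs = [dreal.Variable("x{}".format(k))
              for k in range(model.linear1.weight.size(1))]
var = (model.linear1.weight.data.tolist(),
       model.linear2.weight.data.tolist(),
       model.linear3.weight.data.tolist())
p0, p1 = model.forward_symbolic(var, sym_inputs)  # two dReal expressions summing to 1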
Example #2
def hyper_tan_dr(x):
    # Apply dReal's tanh elementwise to a column vector of expressions
    # (an object-dtype NumPy array of shape (n, 1)); returns a copy.
    y = x.copy()
    for idx in range(len(y)):
        y[idx, 0] = dr.tanh(y[idx, 0])
    return y
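hyper_tan_dr expects an object-dtype NumPy column vector of dReal expressions (note the `y[idx, 0]` indexing). A small usage sketch, with the variable names invented for illustration:

import numpy as np
import dreal as dr

x1, x2 = dr.Variable("x1"), dr.Variable("x2")
z = np.array([[x1 + x2], [x1 - x2]], dtype=object)  # (2, 1) column of expressions
a = hyper_tan_dr(z)  # same shape, each entry wrapped in dr.tanh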
def forward_symbolic(self, var, inputs):
    # Symbolic forward pass for the 2-layer network: one hidden tanh layer,
    # a linear output layer, then an explicit two-class softmax in dReal.
    layer1_weights, layer2_weights = var

    layer1 = []
    for i in range(self.linear1.weight.size(0)):
        # bias term, currently disabled:
        # ex = self.linear1.bias.data[i].item()
        ex = 0
        for j in range(self.linear1.weight.size(1)):
            ex += inputs[j] * layer1_weights[i][j]
        layer1.append(dreal.tanh(ex))

    layer2 = []
    for i in range(self.linear2.weight.size(0)):
        # ex = self.linear2.bias.data[i].item()
        ex = 0
        for j in range(self.linear2.weight.size(1)):
            ex += layer1[j] * layer2_weights[i][j]
        layer2.append(ex)

    softmax = [
        dreal.exp(layer2[0]) /
        (dreal.exp(layer2[0]) + dreal.exp(layer2[1])),
        dreal.exp(layer2[1]) /
        (dreal.exp(layer2[0]) + dreal.exp(layer2[1]))
    ]
    return softmax
Example #4
def test_functions_with_expression(self):
    self.assertEqual(str(abs(e_x)), "abs(x)")
    self.assertEqual(str(exp(e_x)), "exp(x)")
    self.assertEqual(str(sqrt(e_x)), "sqrt(x)")
    self.assertEqual(str(pow(e_x, e_y)), "pow(x, y)")
    self.assertEqual(str(sin(e_x)), "sin(x)")
    self.assertEqual(str(cos(e_x)), "cos(x)")
    self.assertEqual(str(tan(e_x)), "tan(x)")
    self.assertEqual(str(asin(e_x)), "asin(x)")
    self.assertEqual(str(acos(e_x)), "acos(x)")
    self.assertEqual(str(atan(e_x)), "atan(x)")
    self.assertEqual(str(atan2(e_x, e_y)), "atan2(x, y)")
    self.assertEqual(str(sinh(e_x)), "sinh(x)")
    self.assertEqual(str(cosh(e_x)), "cosh(x)")
    self.assertEqual(str(tanh(e_x)), "tanh(x)")
    self.assertEqual(str(min(e_x, e_y)), "min(x, y)")
    self.assertEqual(str(max(e_x, e_y)), "max(x, y)")
    self.assertEqual(str(if_then_else(e_x > e_y, e_x, e_y)),
                     "(if (x > y) then x else y)")
Example #5
def test_functions_with_variable(self):
    self.assertEqual(str(abs(x)), "abs(x)")
    self.assertEqual(str(exp(x)), "exp(x)")
    self.assertEqual(str(sqrt(x)), "sqrt(x)")
    self.assertEqual(str(pow(x, y)), "pow(x, y)")
    self.assertEqual(str(sin(x)), "sin(x)")
    self.assertEqual(str(cos(x)), "cos(x)")
    self.assertEqual(str(tan(x)), "tan(x)")
    self.assertEqual(str(asin(x)), "asin(x)")
    self.assertEqual(str(acos(x)), "acos(x)")
    self.assertEqual(str(atan(x)), "atan(x)")
    self.assertEqual(str(atan2(x, y)), "atan2(x, y)")
    self.assertEqual(str(sinh(x)), "sinh(x)")
    self.assertEqual(str(cosh(x)), "cosh(x)")
    self.assertEqual(str(tanh(x)), "tanh(x)")
    self.assertEqual(str(min(x, y)), "min(x, y)")
    self.assertEqual(str(max(x, y)), "max(x, y)")
    self.assertEqual(str(if_then_else(x > y, x, y)),
                     "(if (x > y) then x else y)")
Example #6
def test_functions_with_float(self):
    v_x = 1.0
    v_y = 1.0
    self.assertEqual(abs(v_x), math.fabs(v_x))
    self.assertEqual(exp(v_x), math.exp(v_x))
    self.assertEqual(sqrt(v_x), math.sqrt(v_x))
    self.assertEqual(pow(v_x, v_y), v_x**v_y)
    self.assertEqual(sin(v_x), math.sin(v_x))
    self.assertEqual(cos(v_x), math.cos(v_x))
    self.assertEqual(tan(v_x), math.tan(v_x))
    self.assertEqual(asin(v_x), math.asin(v_x))
    self.assertEqual(acos(v_x), math.acos(v_x))
    self.assertEqual(atan(v_x), math.atan(v_x))
    self.assertEqual(atan2(v_x, v_y), math.atan2(v_x, v_y))
    self.assertEqual(sinh(v_x), math.sinh(v_x))
    self.assertEqual(cosh(v_x), math.cosh(v_x))
    self.assertEqual(tanh(v_x), math.tanh(v_x))
    self.assertEqual(min(v_x, v_y), min(v_x, v_y))
    self.assertEqual(max(v_x, v_y), max(v_x, v_y))
    self.assertEqual(
        if_then_else(Expression(v_x) > Expression(v_y), v_x, v_y),
        v_x if v_x > v_y else v_y)
Example #7
    w2 = model.layer2.weight.data.numpy()
    b1 = model.layer1.bias.data.numpy()
    b2 = model.layer2.bias.data.numpy()

    # Falsification
    if i % 10 == 0:
        f = [x2,
             -(system.p.g * dreal.sin(x1) - 2 * system.p.L0 * system.p.delta * x2 ** 3) /
             (system.length(vars_) + 2 * system.p.L0 * system.p.delta * x1 * x2)]

        # Candidate V
        z1 = np.dot(vars_, w1.T) + b1

        a1 = []
        for j in range(0, len(z1)):
            a1.append(dreal.tanh(z1[j]))
        z2 = np.dot(a1, w2.T)+b2
        V_learn = dreal.tanh(z2.item(0))

        print('===========Verifying==========')
        start_ = timeit.default_timer()
        result = CheckLyapunov(vars_, f, V_learn, ball_lb, ball_ub, config, epsilon)
        stop_ = timeit.default_timer()

        if result:
            print("Not a Lyapunov function. Found counterexample: ")
            print(result)
            x = LNF.AddCounterexamples(x, result, 10)
        else:
            valid = True
            print("Satisfy conditions!!")
    b2 = model.layer2.bias.data.numpy()

    # Falsification
    if i % 10 == 0:
        f = [
            x2, -(system.p.g * dreal.sin(x1) +
                  2 * system.p.L0 * system.p.delta * x2**3) /
            (system.length(vars_) + 2 * system.p.L0 * system.p.delta * x1 * x2)
        ]

        # Candidate V
        z1 = np.dot(vars_, w1.T) + b1

        a1 = []
        for j in range(0, len(z1)):
            a1.append(dreal.tanh(z1[j]))
        z2 = np.dot(a1, w2.T) + b2
        V_learn = (z2.item(0))**2

        print('===========Verifying==========')
        start_ = timeit.default_timer()
        result = CheckLyapunov(vars_, f, V_learn, ball_lb, ball_ub, config,
                               epsilon)
        stop_ = timeit.default_timer()

        if result:
            print("Not a Lyapunov function. Found counterexample: ")
            print(result)
            x = LNF.AddCounterexamples(x, result, 10)
        else:
            valid = True
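LNF.AddCounterexamples is also not shown. Something in this spirit would take the Box returned by dReal and fold sampled counterexample states back into the training set (a sketch; the helper name and sampling scheme are assumptions):

import numpy as np
import torch

def add_counterexamples_sketch(x, box, variables, n_samples=10):
    # Sample n_samples states uniformly from the counterexample box (indexed by
    # the dReal state Variables) and append them to the training tensor x.
    samples = np.stack(
        [np.random.uniform(box[v].lb(), box[v].ub(), n_samples) for v in variables],
        axis=1)
    return torch.cat([x, torch.tensor(samples, dtype=x.dtype)], dim=0)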
Example #9
    w5 = model.layer5.weight.data.numpy()

    b1 = model.layer1.bias.data.numpy()
    b2 = model.layer2.bias.data.numpy()
    b3 = model.layer3.bias.data.numpy()
    b4 = model.layer4.bias.data.numpy()
    b5 = model.layer5.bias.data.numpy()

    # Falsification
    if i % 10 == 0:
        f = [x2,
             -(system.p.g * dreal.sin(x1) + 2 * system.p.L0 * system.p.delta * x2 ** 3) /
             (system.length(vars_) + 2 * system.p.L0 * system.p.delta * x1 * x2)]

        z1 = np.dot(vars_, w1.T) + b1
        a1 = [dreal.tanh(z) for z in z1]
        z2 = np.dot(a1, w2.T) + b2
        a2 = [dreal.tanh(z) for z in z2]
        z3 = np.dot(a2, w3.T) + b3
        a3 = [dreal.tanh(z) for z in z3]
        z4 = np.dot(a3, w4.T) + b4
        a4 = [dreal.tanh(z) for z in z4]
        z5 = np.dot(a4, w5.T) + b5
        V_learn = dreal.tanh(z5.item(0))

        print('===========Verifying==========')
        start_ = timeit.default_timer()
        result = CheckLyapunov(vars_, f, V_learn, ball_lb, ball_ub, config, epsilon)
        stop_ = timeit.default_timer()

        if result:
    b1 = model.layer1.bias.data.numpy()
    b2 = model.layer2.bias.data.numpy()
    b3 = model.layer3.bias.data.numpy()

    # Falsification
    if i % 10 == 0:
        f = [
            x2, -(system.p.g * dreal.sin(x1) +
                  2 * system.p.L0 * system.p.delta * x2**3) /
            (system.length(vars_) + 2 * system.p.L0 * system.p.delta * x1 * x2)
        ]

        # Candidate V
        z1 = np.dot(vars_, w1.T) + b1
        a1 = [dreal.tanh(z) for z in z1]
        z2 = np.dot(a1, w2.T) + b2
        a2 = [dreal.tanh(z) for z in z2]
        z3 = np.dot(a2, w3.T) + b3
        V_learn = z3.item(0)**2

        print('===========Verifying==========')
        start_ = timeit.default_timer()
        result = CheckLyapunov(vars_, f, V_learn, ball_lb, ball_ub, config,
                               epsilon)
        stop_ = timeit.default_timer()

        if result:
            print("Not a Lyapunov function. Found counterexample: ")
            print(result)
            x = LNF.AddCounterexamples(x, result, 10)
Example #11
def _sympy_converter(var_map, exp, target, expand_pow=False):
    rv = None
    assert isinstance(exp, sp.Expr) and target is not None

    if isinstance(exp, sp.Symbol):
        rv = var_map.get(exp.name, None)
    elif isinstance(exp, sp.Number):
        try:
            rv = RealVal(exp) if isinstance(target, Z3Verifier) else sp.RealNumber(exp)
        except:  # Z3 parser error
            rep = sp.Float(exp, len(str(exp)))
            rv = RealVal(rep)
    elif isinstance(exp, sp.Add):
        # Add(exp_0, ...)
        rv = _sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow)  # eval this expression
        for e in exp.args[1:]:  # add it to all other remaining expressions
            rv += _sympy_converter(var_map, e, target, expand_pow=expand_pow)
    elif isinstance(exp, sp.Mul):
        rv = _sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow)
        for e in exp.args[1:]:
            rv *= _sympy_converter(var_map, e, target, expand_pow=expand_pow)
    elif isinstance(exp, sp.Pow):
        x = _sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow)
        e = _sympy_converter(var_map, exp.args[1], target, expand_pow=expand_pow)
        if expand_pow:
            try:
                i = float(e.sexpr())
                assert i.is_integer()
                i = int(i) - 1
                rv = x
                for _ in range(i):
                    rv *= x
            except:  # fallback
                rv = _sympy_converter(var_map, exp, target, expand_pow=False)
        else:
            rv = x ** e
    elif isinstance(exp, sp.Max):
        # ReLU encoded as Max(0, x): args[0] is taken to be the zero constant,
        # args[1] the actual argument.
        x = _sympy_converter(var_map, exp.args[1], target, expand_pow=expand_pow)
        zero = exp.args[0]
        if target == Z3Verifier:
            rv = z3.If(x >= 0.0, x, 0.0)
        else:
            rv = dr.max(x, 0.0)
    elif isinstance(exp, sp.Heaviside):
        x = _sympy_converter(var_map, exp.args[0], target, expand_pow=False)
        if target == Z3Verifier:
            rv = z3.If(x > 0.0, 1.0, 0.0)
        else:
            rv = dr.if_then_else(x > 0.0, 1.0, 0.0)
    elif isinstance(exp, sp.Function):
        # check various activation types ONLY FOR DREAL
        if isinstance(exp, sp.tanh):
            rv = dr.tanh(_sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow))
        elif isinstance(exp, sp.sin):
            rv = dr.sin(_sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow))
        elif isinstance(exp, sp.cos):
            rv = dr.cos(_sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow))
        elif isinstance(exp, sp.exp):
            rv = dr.exp(_sympy_converter(var_map, exp.args[0], target, expand_pow=expand_pow))
    else:
        raise ValueError('Term ' + str(exp) + ' not recognised')

    assert rv is not None
    return rv
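For the dReal branch, the converter can be exercised roughly as follows; `DRealVerifier` is a hypothetical dispatch marker (any non-None target other than Z3Verifier takes the dReal path), and the expression is just an example:

import sympy as sp
import dreal as dr

class DRealVerifier:  # hypothetical marker class, used only for dispatch
    pass

x_sp = sp.Symbol("x")
x_dr = dr.Variable("x")
expr = sp.tanh(x_sp) * x_sp + sp.sin(x_sp)
dr_expr = _sympy_converter({"x": x_dr}, expr, DRealVerifier)
print(dr_expr)  # a dReal expression combining tanh, sin and x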
    b1 = model.layer1.bias.data.numpy()
    b2 = model.layer2.bias.data.numpy()
    b3 = model.layer3.bias.data.numpy()

    # Falsification
    if i % 10 == 0:
        f = [
            x2, -(system.p.g * dreal.sin(x1) +
                  2 * system.p.L0 * system.p.delta * x2**3) /
            (system.length(vars_) + 2 * system.p.L0 * system.p.delta * x1 * x2)
        ]

        # Candidate V
        z1 = np.dot(vars_, w1.T) + b1
        a1 = [dreal.tanh(z) for z in z1]
        z2 = np.dot(a1, w2.T) + b2
        a2 = [dreal.tanh(z) for z in z2]
        z3 = np.dot(a2, w3.T) + b3
        V_learn = dreal.tanh(z3.item(0))

        print('===========Verifying==========')
        start_ = timeit.default_timer()
        result = CheckLyapunov(vars_, f, V_learn, ball_lb, ball_ub, config,
                               epsilon)
        stop_ = timeit.default_timer()

        if result:
            print("Not a Lyapunov function. Found counterexample: ")
            print(result)
            x = LNF.AddCounterexamples(x, result, 10)
Example #13
    b1 = model.layer1.bias.data.numpy()
    b2 = model.layer2.bias.data.numpy()
    b3 = model.layer3.bias.data.numpy()
    b4 = model.layer4.bias.data.numpy()

    # Falsification
    if i % 10 == 0:
        f = [
            x2, -(system.p.g * dreal.sin(x1) +
                  2 * system.p.L0 * system.p.delta * x2**3) /
            (system.length(vars_) + 2 * system.p.L0 * system.p.delta * x1 * x2)
        ]

        z1 = np.dot(vars_, w1.T) + b1
        a1 = [dreal.tanh(z) for z in z1]
        z2 = np.dot(a1, w2.T) + b2
        a2 = [dreal.tanh(z) for z in z2]
        z3 = np.dot(a2, w3.T) + b3
        a3 = [dreal.tanh(z) for z in z3]
        z4 = np.dot(a3, w4.T) + b4
        V_learn = dreal.tanh(z4.item(0))

        print('===========Verifying==========')
        start_ = timeit.default_timer()
        result = CheckLyapunov(vars_, f, V_learn, ball_lb, ball_ub, config,
                               epsilon)
        stop_ = timeit.default_timer()

        if result:
            print("Not a Lyapunov function. Found counterexample: ")