def setUp(self):
    self.c1 = core.Constant([2.1, 3], trainable=False)
    self.c2 = core.Constant([4.5, 0.8], trainable=False)
    self.n_x = 10
    self.x = core.Variable('x', np.random.normal(0., 1., (self.n_x, 2)))
    self.n_y = 5
    self.y = core.Variable('y', np.random.normal(0., 4., (self.n_y, 2)))
def test_from_trainable_constants(self):
    """Variables built from trainable constants keep a gradient path to them."""
    c1 = core.Constant([2.1, 3], trainable=True)
    c2 = core.Constant([4.5, 0.8], trainable=True)
    with tf.GradientTape() as tape:
        x = core.Variable.from_constants('x', [c1, c2], tape=tape)
    self.assertIsNotNone(tape.gradient(x.tensor, c1.tensor))
    # A tape that is not active (not entered via `with`, so not recording)
    # is expected to be rejected.
    tape = tf.GradientTape()
    with self.assertRaises(ValueError):
        x = core.Variable.from_constants('x', [c1, c2], tape=tape)
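# --- Illustrative sketch (assumption: not part of the original suite) ---
# The application-side pattern the test above exercises: group trainable
# constants into one variable under an *active* tape, so a later loss can be
# differentiated w.r.t. each constant. The helper name and the stand-in loss
# are hypothetical; `core.Variable.from_constants` is as used in the tests.
def _example_from_constants_usage():
    c1 = core.Constant([2.1, 3], trainable=True)
    c2 = core.Constant([4.5, 0.8], trainable=True)
    with tf.GradientTape() as tape:
        x = core.Variable.from_constants('x', [c1, c2], tape=tape)
        loss = tf.reduce_sum(x.tensor)  # stand-in loss for illustration
    # One gradient per constant, flowing through the stacked variable.
    return tape.gradient(loss, [c1.tensor, c2.tensor])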
def test_trainable(self):
    for c_val in self.c_vals:
        c = core.Constant(c_val, trainable=True)
        self.assertTrue(array_allclose(c.tensor.numpy(), c_val))
        # Trainable constants are backed by a tf.Variable.
        self.assertIsInstance(c.tensor, tf.Variable)
        self.assertTrue(c.tensor.trainable)
        self.assertEqual(c.free_vars, [])
def test_init_from_tf(self):
    for c_val in self.c_vals:
        c_val = tf.constant(c_val)
        c = core.Constant(c_val, trainable=False)
        self.assertTrue(array_allclose(c.tensor.numpy(), c_val))
        # Non-trainable constants keep a plain tf.Tensor.
        self.assertIsInstance(c.tensor, tf.Tensor)
        self.assertEqual(c.free_vars, [])
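# --- Illustrative note (assumption: not part of the original suite) ---
# The two constructor modes exercised by the tests above, side by side:
# a trainable constant wraps a tf.Variable, a non-trainable one a tf.Tensor.
def _example_constant_modes():
    c_train = core.Constant([2.1, 3], trainable=True)                  # tf.Variable inside
    c_fixed = core.Constant(tf.constant([4.5, 0.8]), trainable=False)  # tf.Tensor inside
    return isinstance(c_train.tensor, tf.Variable), isinstance(c_fixed.tensor, tf.Tensor)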
def test_gradients(self):
    """Gradients flow from an aggregated formula back to the constants."""
    c_s = [
        core.Constant(np.random.rand(2), trainable=True)
        for _ in range(self.n_x)
    ]
    with tf.GradientTape() as tape:
        x = core.Variable.from_constants('x', c_s, tape=tape)
        phi = self.p1([x, self.y])
        res = self.Forall(x, phi)
    self.assertIsNotNone(tape.gradient(res.tensor, c_s[0].tensor))
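# --- Illustrative sketch (assumption: not part of the original suite) ---
# The gradient checked above is what a training loop would consume: raise the
# truth value of Forall(x, p1(x, y)) by updating the embedded constants.
# `forall`, `p1`, `y`, the optimizer, and the helper name are hypothetical
# stand-ins for the fixtures used in the test.
def _example_training_step(constants, forall, p1, y, optimizer):
    with tf.GradientTape() as tape:
        x = core.Variable.from_constants('x', constants, tape=tape)
        sat = forall(x, p1([x, y])).tensor
        loss = 1. - sat  # drive the aggregated truth value towards 1
    variables = [c.tensor for c in constants]
    grads = tape.gradient(loss, variables)
    optimizer.apply_gradients(zip(grads, variables))
    return sat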
def setUp(self):
    self.And = core.Wrapper_Connective(fuzzy_ops.And_Prod())
    self.Not = core.Wrapper_Connective(fuzzy_ops.Not_Std())
    self.c1 = core.Constant([2.1, 3], trainable=True)
    self.n_x = 10
    self.x = core.Variable('x', np.random.normal(0., 1., (self.n_x, 2)))
    self.n_y = 5
    self.y = core.Variable('y', np.random.normal(0., 4., (self.n_y, 2)))
    self.p1 = core.Predicate.MLP([2])
    self.p2 = core.Predicate.MLP([2, 2])
    self.a = core.Proposition(0., trainable=True)
def test_free_dims_wt_broadcast_constant(self):
    """Adds the correct dimensions when a constant is involved."""
    c1 = core.Constant([2.1, 3], trainable=False)
    exprs = core.broadcast_exprs(list(self.xs.values()) + [c1])
    for expr in exprs:
        self.assertEqual(sorted(expr.free_vars), sorted(self.var_settings.keys()))
        for label, v_s in self.var_settings.items():
            self.assertEqual(
                expr._get_dim_of_free_var(label).numpy(),
                v_s["n_individuals"])
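# --- Illustrative sketch (assumption: not part of the original suite) ---
# What broadcast_exprs is expected to do, per the assertions above: after
# broadcasting, every expression carries the union of the free variables, and
# the dimension attached to each variable equals its number of individuals.
# The helper name and the concrete shapes are chosen for illustration only.
def _example_broadcast():
    x = core.Variable('x', np.random.rand(3, 2))  # 3 individuals
    y = core.Variable('y', np.random.rand(5, 2))  # 5 individuals
    c = core.Constant([2.1, 3], trainable=False)  # no free variables
    x_b, y_b, c_b = core.broadcast_exprs([x, y, c])
    # Each result now has free_vars covering both 'x' (dim 3) and 'y' (dim 5);
    # the constant is tiled across both variable dimensions.
    return c_b.free_vars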
def setUp(self):
    self.x = core.Variable('x', np.random.rand(3, 1))
    self.y = core.Variable('y', np.random.rand(4, 1))
    self.c = core.Constant([3.], trainable=False)
    self.f1 = core.Function.MLP(input_shapes=[1], output_shape=[1])
    self.f2 = core.Function.MLP(input_shapes=[1, 1], output_shape=[1])
    self.p1 = core.Predicate.MLP(input_shapes=[1])
    self.p2 = core.Predicate.MLP(input_shapes=[1, 1])
    self.q = core.Proposition(0., trainable=False)
    self.And = core.Wrapper_Connective(fuzzy_ops.And_Prod())
    self.Not = core.Wrapper_Connective(fuzzy_ops.Not_Std())
    self.Exists = core.Wrapper_Quantifier(fuzzy_ops.Aggreg_Mean(), semantics="exists")
    self.mask = core.Formula(
        tf.constant([[1., 1., 0., 0.],
                     [0., 1., 1., 0.],
                     [0., 0., 1., 0.]]),
        free_vars=['x', 'y'])
def test_from_non_trainable_constants(self):
    c1 = core.Constant([2.1, 3], trainable=False)
    c2 = core.Constant([4.5, 0.8], trainable=False)
    with tf.GradientTape() as tape:
        x = core.Variable.from_constants('x', [c1, c2], tape=None)
    # Without a tape, no gradient path back to the constants is recorded.
    self.assertIsNone(tape.gradient(x.tensor, c1.tensor))