Example #1
    def testSimplePredicateOptimization(self):
        nr_samples=100

        ltnw.variable("?data_A",numpy.random.uniform([0.,0.],[.1,1.],(nr_samples,2)).astype("float32"))
        ltnw.variable("?data_not_A",numpy.random.uniform([2.,0],[3.,1.],(nr_samples,2)).astype("float32"))
        
        ltnw.predicate("A",2)
        
        ltnw.axiom("forall ?data_A: A(?data_A)")
        ltnw.axiom("forall ?data_not_A: ~A(?data_not_A)")
        
        ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
        sat_level=ltnw.train(track_sat_levels=10000,sat_level_epsilon=.99)
        
        self.assertGreater(sat_level,.8)
            
        ltnw.constant("a",[0.5,0.5])
        ltnw.constant("b",[2.5,0.5])
        
        self.assertGreater(ltnw.ask("A(a)")[0],.8)
        self.assertGreater(ltnw.ask("~A(b)")[0],.8)

        result=ltnw.ask_m(["A(a)","~A(b)"])
        
        for r in result:
            self.assertGreater(r[0],.8)
            self.assertGreater(r[0],.8)
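
Beyond the fixed-point assertions, the trained knowledge base also answers quantified queries. A minimal sketch of such queries, written as a plain script rather than a test and assuming the same declarations as above (the formulas are illustrative):

# quantified queries over the declared variables, evaluated on the trained grounding
print(ltnw.ask("forall ?data_A: A(?data_A)"))
print(ltnw.ask("exists ?data_not_A: A(?data_not_A)"))

# ask_m bundles several queries into a single evaluation
for value in ltnw.ask_m(["A(a)", "~A(b)", "forall ?data_A: A(?data_A)"]):
    print(value)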
Example #2
    def testSimplePredicate(self):
        import tensorflow
        
        nr_samples=100

        ltnw.constant("a",[2.,3.])
        ltnw.variable("?data_A",numpy.random.uniform([0.,0.],[.1,1.],(nr_samples,2)).astype("float32"))
        
        mu=tensorflow.constant([2.,3.])
        ltnw.predicate("A",2,pred_definition=lambda x: tensorflow.exp(-tensorflow.norm(tensorflow.subtract(x,mu),axis=1)))
        
        self.assertEqual(ltnw.ask("A(a)"),1.)
        self.assertGreater(ltnw.ask("forall ?data_A: A(?data_A)"),0.)
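
In this test the predicate is not learned at all: pred_definition supplies an explicit grounding, so A(a) evaluates to exp(-||a-mu||) = 1 at a = mu. A minimal sketch of the same mechanism with a different hand-written grounding (the predicate name NearOrigin and its membership function are made up for illustration):

import numpy
import tensorflow
import logictensornetworks_wrapper as ltnw

# truth value decays with the distance from the origin; no trainable parameters involved
ltnw.predicate("NearOrigin", 2,
               pred_definition=lambda x: tensorflow.exp(-tensorflow.norm(x, axis=1)))

ltnw.constant("o", [0., 0.])
ltnw.variable("?points", numpy.random.uniform([-1., -1.], [1., 1.], (100, 2)).astype("float32"))

print(ltnw.ask("NearOrigin(o)"))                        # should be close to 1
print(ltnw.ask("forall ?points: NearOrigin(?points)"))  # aggregated truth over the whole variable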
Example #3
    def testBuildFormula(self):
        data = numpy.random.uniform([-1,-1],[1,1],(500,2),).astype(numpy.float32)
        ltnw.constant("c",[1.,0])
        ltnw.variable("?var",data)
        ltnw.variable("?var2",data)
        ltnw.function("f",2,fun_definition=lambda d:d[:,:2])
        ltnw.function("g",4,fun_definition=lambda d:d)
        ltnw.predicate("P",2)
        ltnw.predicate("B",2)
        ltnw.predicate("REL",4)
        
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(c)")))
        with self.assertRaises(Exception):
            ltnw._build_formula(ltnw._parse_formula("P(d)"))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(?var)")))
        with self.assertRaises(Exception):
            ltnw._build_formula(ltnw._parse_formula("P(?vars)"))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(f(?var))")))
        with self.assertRaises(Exception):
            ltnw._build_formula(ltnw._parse_formula("P(h(?var))")) # h not declared
        
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~P(?var)")))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~P(f(?var))")))
        
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~REL(?var,?var2)")))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~REL(?var,f(g(?var2)))")))
        with self.assertRaises(Exception):
            self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~REL(f(?var))")))

        for op in ["&","|","->"]:
            self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(?var) %s ~ P(?var)" % op)))
            self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(?var) %s ~ P(?var)" % op)))
            self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~P(?var) %s P(?var)" % op)))
            self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("~P(?var) %s ~P(?var)" % op)))

        for i in range(10):
            self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(?var) %s ~P(?var)%s ~P(?var)%s P(?var)" % tuple(numpy.random.permutation(["&","|","->"])))))

        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("forall ?var: P(?var) & ~ P(?var)")))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("forall ?var,?var2: P(?var) & ~ P(?var2)")))        
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(c) & (forall ?var,?var2: P(?var) & ~ P(?var2))")))

        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("exists ?var: P(?var) & ~ P(?var)")))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("exists ?var,?var2: P(?var) & ~ P(?var2)")))        


        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("forall ?var: (exists ?var2: P(?var) & ~ P(?var2))")))
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("forall ?var: (exists ?var2: P(?var) & ~ P(?var2))")))
        
        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("(forall ?var: (exists ?var2: (P(?var) & P(?var2) & (forall ?var: P(?var)))))")))

        self.assertIsNotNone(ltnw._build_formula(ltnw._parse_formula("P(c) | P(?var)")))
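
Taken together, the assertions document the formula grammar the wrapper accepts: predicates applied to constants, ?variables and declared functions, negation with ~, the binary connectives &, | and ->, and forall/exists with comma-separated variable lists. A compact recap of well-formed strings under that grammar, assuming the same declarations of P, REL, f, g, c, ?var and ?var2 as in the test:

# every string below parses and builds only because all of its symbols were declared first
for f in ["P(c)",
          "P(f(?var))",
          "~REL(?var,f(g(?var2)))",
          "P(?var) -> ~P(?var)",
          "forall ?var,?var2: P(?var) & ~P(?var2)",
          "exists ?var: P(?var)"]:
    ltnw._build_formula(ltnw._parse_formula(f))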
Example #4
# define data
train_X = np.random.uniform(start, end, (training_size)).astype("float32")
train_Y = slope * train_X + np.random.normal(scale=var, size=len(train_X))

# define the language: translate every training example into a constant
[ltnw.constant("x_%s" % i, [x]) for i, x in enumerate(train_X)]
[ltnw.constant("y_%s" % i, [y]) for i, y in enumerate(train_Y)]

# define the function f as a linear regressor
W = tf.Variable(np.random.randn(), name="weight")
b = tf.Variable(np.random.randn(), name="bias")
ltnw.function("f", 1, 1, fun_definition=lambda X: tf.add(tf.multiply(X, W), b))

# define an equality predicate based on the euclidean distance of two vectors
ltnw.predicate("eq", 2, ltnl.equal_euclidian)

# defining the theory
for f in ["eq(f(x_%s),y_%s)" % (i, i) for i in range(len(train_X))]:
    ltnw.axiom(f)
print("\n".join(sorted(ltnw.AXIOMS.keys())))

# initializing knowledgebase and optimizing
ltnw.initialize_knowledgebase(optimizer=tf.train.GradientDescentOptimizer(
    learning_rate=learning_rate))
ltnw.train(max_epochs=epochs)

# visualize results on training data
ltnw.variable("?x", 1)
prediction = ltnw.ask("f(?x)",
                      feed_dict={"?x": train_X.reshape(len(train_X), 1)})
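
The last two lines show the feed_dict pattern: a variable declared only by its dimension (here ?x with dimension 1) is bound to concrete values at query time. Continuing the script above, the same mechanism evaluates the learned regressor on fresh inputs and checks individual axioms (test_X below is illustrative and reuses the start/end range defined earlier in the script):

# evaluate f on unseen inputs by feeding the ?x placeholder
test_X = np.linspace(start, end, 20).astype("float32").reshape(-1, 1)
test_prediction = ltnw.ask("f(?x)", feed_dict={"?x": test_X})

# the satisfaction of any single training axiom can be queried the same way
print(ltnw.ask("eq(f(x_0),y_0)"))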
Example #5
close_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data)) for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) < np.square(.5)
])

close_data = close_data[np.random.random_integers(0, len(data), 1000)]
distant_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) > np.square(1.)
])

# defining the language
ltnw.variable("?x", data)
ltnw.variable("?y", data)
ltnw.variable("?close_x_y", close_data)
ltnw.variable("?distant_x_y", distant_data)
[ltnw.predicate("C_" + str(i), 2) for i in range(nr_of_clusters)]

ltnw.function("first", 2, fun_definition=lambda d: d[:, :2])
ltnw.function("second", 2, fun_definition=lambda d: d[:, 2:])

print("defining the theory T")
ltnw.axiom("forall ?x: %s" %
           "|".join(["C_%s(?x)" % i for i in range(nr_of_clusters)]))
for i in range(nr_of_clusters):
    ltnw.axiom("exists ?x: C_%s(?x)" % i)
    ltnw.axiom(
        "forall ?close_x_y: C_%s(first(?close_x_y)) %% C_%s(second(?close_x_y))"
        % (i, i))
    ltnw.axiom(
        "forall ?distant_x_y: C_%s(first(?distant_x_y)) %% C_%s(second(?distant_x_y))"
        % (i, i))
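
The snippet stops before optimization, but the remaining steps follow the pattern of the earlier examples: initialize, train, then read the cluster memberships off the C_i predicates. A sketch of that final step, continuing the script above (the training arguments and the argmax readout are illustrative):

ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
ltnw.train(track_sat_levels=1000, sat_level_epsilon=.99)

# one truth value per data point and per cluster predicate
memberships = np.concatenate([ltnw.ask("C_%s(?x)" % i) for i in range(nr_of_clusters)], axis=1)
hard_assignment = memberships.argmax(axis=1)  # assign each point to its most satisfied cluster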
Example #6
nr_of_points_x_cluster = 50
clusters = []
for i in range(nr_of_clusters):
    mean = np.random.uniform([-1, -1], [1, 1], 2).astype(np.float32)
    cov = np.array([[.001, 0], [0, .001]])
    clusters.append(
        np.random.multivariate_normal(mean=mean,
                                      cov=cov,
                                      size=nr_of_points_x_cluster).astype(
                                          np.float32))
data = np.concatenate(clusters)

# define the language
ltnw.variable("?x", data)
ltnw.variable("?y", data)
ltnw.predicate("close", 2, ltnl.equal_euclidian)
[ltnw.predicate("C_" + str(i), 2) for i in range(nr_of_clusters)]

## define the theory
print("defining the theory T")
ltnw.axiom("forall ?x: %s" %
           "|".join(["C_%s(?x)" % i for i in range(nr_of_clusters)]))
for i in range(nr_of_clusters):
    ltnw.axiom("exists ?x: C_%s(?x)" % i)
    ltnw.axiom("forall ?x,?y: (C_%s(?x) & close(?x,?y)) -> C_%s(?y)" % (i, i))
    ltnw.axiom(
        "forall ?x,?y: (C_%s(?x) & ~close(?x,?y)) -> (%s)" %
        (i, "|".join(["C_%s(?y)" % j
                      for j in range(nr_of_clusters) if i != j])))

    for j in range(i + 1, nr_of_clusters):
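
The axiom strings above are assembled with ordinary Python string formatting before being handed to ltnw.axiom; the snippet breaks off inside the loop over cluster pairs. For nr_of_clusters = 3 the generated strings look as follows (cluster i = 0 shown for the per-cluster axioms):

# what the string construction above produces for nr_of_clusters = 3
generated_axioms = [
    "forall ?x: C_0(?x)|C_1(?x)|C_2(?x)",
    "exists ?x: C_0(?x)",
    "forall ?x,?y: (C_0(?x) & close(?x,?y)) -> C_0(?y)",
    "forall ?x,?y: (C_0(?x) & ~close(?x,?y)) -> (C_1(?y)|C_2(?y))",
]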
Example #7
import numpy as np
import matplotlib.pyplot as plt

import logictensornetworks_wrapper as ltnw

nr_samples=500

data=np.random.uniform([0,0],[1.,1.],(nr_samples,2)).astype(np.float32)
data_A=data[np.where(np.sum(np.square(data-[.5,.5]),axis=1)<.09)]
data_not_A=data[np.where(np.sum(np.square(data-[.5,.5]),axis=1)>=.09)]

ltnw.variable("?data_A",data_A)
ltnw.variable("?data_not_A",data_not_A)
ltnw.variable("?data",data)

ltnw.predicate("A",2)

ltnw.axiom("forall ?data_A: A(?data_A)")
ltnw.axiom("forall ?data_not_A: ~A(?data_not_A)")

ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
sat_level=ltnw.train(track_sat_levels=1000,sat_level_epsilon=.99)
    
plt.figure(figsize=(12,8))
result=ltnw.ask("A(?data)")
plt.subplot(2,2,1)
plt.scatter(data[:,0],data[:,1],c=result.squeeze())
plt.colorbar()
plt.title("A(x) - training data")

result=ltnw.ask("~A(?data)")
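
The script breaks off right after the second query; plotting it would mirror the first subplot. A sketch of that continuation (subplot index and title are illustrative):

# plot the complementary query ~A next to the first panel
plt.subplot(2,2,2)
plt.scatter(data[:,0],data[:,1],c=result.squeeze())
plt.colorbar()
plt.title("~A(x) - training data")
plt.show()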
Example #8
import logging; logging.basicConfig(level=logging.DEBUG)

import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np

import logictensornetworks_wrapper as ltnw

import spatial_relations_data

# generate artificial data
nr_examples = 50 # positive and negative examples for each predicate
nr_test_examples=400

# 1) define the language and examples
ltnw.predicate("Left",8)
ltnw.variable("?left_xy",8)
ltnw.variable("?not_left_xy", 8)
ltnw.axiom("forall ?left_xy: Left(?left_xy)")
ltnw.axiom("forall ?not_left_xy: ~Left(?not_left_xy)")


ltnw.predicate("Right",8)
ltnw.variable("?right_xy",8)
ltnw.variable("?not_right_xy",8)
ltnw.axiom("forall ?right_xy: Right(?right_xy)")
ltnw.axiom("forall ?not_right_xy: ~Right(?not_right_xy)")

ltnw.predicate("Below",8)
ltnw.variable("?below_xy",8)
ltnw.variable("?not_below_xy",8)
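
Unlike the earlier membership examples, the variables here are declared only with their dimension (8 features, i.e. the concatenation of two bounding boxes), so concrete examples must be bound later. The feed_dict mechanism from Example #4 is one way to do that once the knowledge base has been initialized and trained; a sketch of a single query (the test pairs below are illustrative):

# bind the placeholder variable to concrete bounding-box pairs at query time
test_left_xy = np.random.uniform(0., 1., (nr_test_examples, 8)).astype("float32")
print(ltnw.ask("Left(?left_xy)", feed_dict={"?left_xy": test_left_xy}))  # one truth value per pair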
Example #9
ltnw.variable('?left_pair',
              [full_obj_set[p[0]] + full_obj_set[p[1]] for p in left_pairs])
ltnw.variable('?front_pair',
              [full_obj_set[p[0]] + full_obj_set[p[1]] for p in front_pairs])
ltnw.variable('?behind_pair',
              [full_obj_set[p[0]] + full_obj_set[p[1]] for p in behind_pairs])

time_diff = time.time() - start_time
print('Time to complete : ', time_diff)
start_time = time.time()
print('******* Predicate/Axioms for Object Features ******')

# Object Features
for feat in obj_feat:
    ltnw.predicate(label=feat.capitalize(),
                   number_of_features_or_vars=num_of_features,
                   layers=num_of_layers)

for i, feat in enumerate(obj_feat):
    ltnw.axiom('forall ?is_' + feat + ' : ' + feat.capitalize() + '(?is_' +
               feat + ')')
    ltnw.axiom('forall ?isnot_' + feat + ' : ~' + feat.capitalize() +
               '(?isnot_' + feat + ')')

# Implicit axioms about object features
## objects can only be one color
for c in obj_colors:
    is_color = ''
    is_not_color = ''
    for not_c in obj_colors:
        if not_c == c: is_color = c.capitalize() + '(?obj)'
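
The loop is cut off while it assembles the color-exclusivity strings. Independently of how the original finishes that construction, mutual exclusion can be written directly with the connectives used throughout these examples; a hand-written sketch for three hypothetical color predicates (the names Red, Green, Blue and the ?obj variable are assumptions, not taken from the script):

# one way to state that an object has exactly one color: each color rules out the others
ltnw.axiom("forall ?obj: Red(?obj) -> (~Green(?obj) & ~Blue(?obj))")
ltnw.axiom("forall ?obj: Green(?obj) -> (~Red(?obj) & ~Blue(?obj))")
ltnw.axiom("forall ?obj: Blue(?obj) -> (~Red(?obj) & ~Green(?obj))")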
Example #10
for l in 'abcdefghijklmn':
    ltnw.constant(l,
                  min_value=[0.] * embedding_size,
                  max_value=[1.] * embedding_size)

# create variables that range over all individuals, and over the individuals of group 1 and group 2.

ltnw.variable("p", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
ltnw.variable("q", ltnw.VARIABLES["p"])
ltnw.variable("p1", tf.concat([ltnw.CONSTANTS[l] for l in "abcdefgh"], axis=0))
ltnw.variable("q1", ltnw.VARIABLES["p1"])
ltnw.variable("p2", tf.concat([ltnw.CONSTANTS[l] for l in "ijklmn"], axis=0))
ltnw.variable("q2", ltnw.VARIABLES["p2"])

# declare the predicates
ltnw.predicate('Friends', embedding_size * 2)
ltnw.predicate('Smokes', embedding_size)
ltnw.predicate('Cancer', embedding_size)

# add the assertional knowledge in our possession

ltnw.axiom("Friends(a,b)")
ltnw.axiom("~Friends(a,c)")
ltnw.axiom("~Friends(a,d)")
ltnw.axiom("Friends(a,e)")
ltnw.axiom("Friends(a,f)")
ltnw.axiom("Friends(a,g)")
ltnw.axiom("~Friends(a,h)")
ltnw.axiom("Friends(b,c)")
ltnw.axiom("~Friends(b,d)")
ltnw.axiom("~Friends(b,e)")
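
Once the knowledge base built from these facts has been initialized and trained as in the other examples, the same constants and variables can be queried to see how far the learned grounding generalizes beyond the asserted facts. A sketch of typical queries (illustrative):

print(ltnw.ask("Friends(a,b)"))                      # an asserted fact
print(ltnw.ask("Cancer(a)"))                         # facts not listed above get their value from the learned grounding
print(ltnw.ask("forall p: Smokes(p) -> Cancer(p)"))  # a universally quantified hypothesis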
Example #11
nr_samples=5
epochs=10000

data_A=np.random.uniform([0,0],[.25,1.],(nr_samples,2)).astype(np.float32)
data_B=np.random.uniform([.75,0],[1.,1.],(nr_samples,2)).astype(np.float32)
data=np.concatenate([data_A,data_B])

ltnw.variable("?data_A",data_A)
ltnw.variable("?data_A_2",data_A)
ltnw.variable("?data_B",data_B)
ltnw.variable("?data_B_2",data_B)
ltnw.variable("?data",data)
ltnw.variable("?data_1",data)
ltnw.variable("?data_2",data)

ltnw.predicate("A",2)
ltnw.predicate("B",2)

ltnw.axiom("forall ?data_A: A(?data_A)")
ltnw.axiom("forall ?data_B: ~A(?data_B)")

ltnw.axiom("forall ?data_B: B(?data_B)")
ltnw.axiom("forall ?data_A: ~B(?data_A)")

ltnw.predicate("R_A_A",4)
ltnw.predicate("R_B_B",4)
ltnw.predicate("R_A_B",4)

ltnw.axiom("forall ?data, ?data_2: (A(?data) & A(?data_2)) -> R_A_A(?data,?data_2)")
ltnw.axiom("forall ?data, ?data_2: R_A_A(?data,?data_2) -> (A(?data) & A(?data_2))")
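
R_A_A takes four features, i.e. the concatenation of two 2-dimensional points, so it can be applied to any pair of the declared variables. Continuing the script with just the axioms shown above, the relation can be trained and queried like any other predicate (the queries are illustrative; the original script may add further axioms after the point where it is cut off):

ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
ltnw.train(track_sat_levels=1000, sat_level_epsilon=.99)

# R_A_A should hold between points of A and fail between points of A and B
print(ltnw.ask("forall ?data_A, ?data_A_2: R_A_A(?data_A,?data_A_2)"))
print(ltnw.ask("forall ?data_A, ?data_B: ~R_A_A(?data_A,?data_B)"))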
Example #12
    def testPredicate(self):
        p1=ltnw.predicate("P1",2)
        p2=ltnw.predicate("P2",2)
        self.assertEqual(ltnw.predicate("P1"),p1)
        self.assertEqual(ltnw.predicate("P2"),p2)
        self.assertNotEqual(ltnw.predicate("P1"),p2)
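
The test relies on the wrapper keeping a registry of declared symbols: calling ltnw.predicate with only a label returns the object created earlier instead of declaring a new one. Constants, variables and axioms are exposed the same way through the module-level dictionaries ltnw.CONSTANTS, ltnw.VARIABLES and ltnw.AXIOMS used in the examples above. A short sketch:

import logictensornetworks_wrapper as ltnw

p = ltnw.predicate("P", 2)       # declare once ...
assert ltnw.predicate("P") == p  # ... later look-ups with just the label return the same predicate

ltnw.constant("c", [0., 1.])
print(list(ltnw.CONSTANTS.keys()))  # declared constants live in a module-level registry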