Example #1
def variable(label, *args, **kwargs):
    label = _variable_label(label)
    if label in VARIABLES and args == () and kwargs == {}:
        return VARIABLES[label]
    elif label in VARIABLES and CONFIGURATION.get("error_on_redeclare"):
        logging.getLogger(__name__).error(
            "Attempt at redeclaring existing variable %s" % label)
        raise Exception("Attempt at redeclaring existing variable %s" % label)
    else:
        if label in VARIABLES:
            logging.getLogger(__name__).warning(
                "Redeclaring existing variable %s" % label)
        VARIABLES[label] = ltn.variable(label, *args, **kwargs)
        return VARIABLES[label]
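A minimal usage sketch of this wrapper (VARIABLES, CONFIGURATION, and _variable_label are module-level names assumed from the snippet above):

# first call creates the variable and caches it in the registry
x = variable("x", 2)
# a later call with no extra arguments returns the cached instance
assert variable("x") is x
# calling again *with* arguments either logs a warning or raises,
# depending on CONFIGURATION["error_on_redeclare"]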
Example #2
facts = [eq(f(x), y) for x, y in zip(c_x, c_y)]
cost = -tf.reduce_mean(tf.stack(facts))

sess = tf.Session()
opt = tf.train.GradientDescentOptimizer(
    learning_rate=learning_rate).minimize(cost)

init = tf.global_variables_initializer()
sess.run(init)
for i in range(epochs):
    sess.run(opt)
    if i % 10 == 0:
        print(i, "sat level -----> ", sess.run(cost))

# Testing example
x = ltn.variable("x", 1)
plt.figure()
plt.plot(train_X, train_Y, 'o', color="black", label='Training data')
plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')
plt.plot(train_X,
         sess.run(f(x), feed_dict={x: train_X.reshape(len(train_X), 1)}),
         'o',
         color="red",
         label='prediction')
plt.legend()
plt.show()

test_X = np.random.uniform(start, end, testing_size).astype("float32")
test_Y = slope * test_X + np.random.normal(scale=var, size=len(test_X))
plt.figure()
plt.plot(test_X, test_Y, 'bo', label='Testing data')
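The snippet stops after plotting the raw testing data; a plausible continuation, mirroring the training plot above (f, x, and sess are assumed from the snippet):

plt.plot(test_X,
         sess.run(f(x), feed_dict={x: test_X.reshape(len(test_X), 1)}),
         'o',
         color="red",
         label='prediction')
plt.legend()
plt.show()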
Example #3
import logictensornetworks as ltn
from logictensornetworks import Implies, And, Not, Forall, Exists
import tensorflow as tf
import numpy as np
a = ltn.proposition("a", value=0.2)
b = ltn.proposition("b")
c = ltn.proposition("c")
w1 = ltn.proposition("w1", value=0.3)
w2 = ltn.proposition("w2", value=0.9)

x = ltn.variable("x", np.array([[1, 2], [3, 4], [5, 6]]).astype(np.float32))
P = ltn.predicate("P", 2)

formula = And(Implies(And(Forall(x, P(x)), a, b), Not(c)), c)
w1_formula1 = Implies(w1, Forall(x, P(x)))
w2_formula2 = Implies(w2, Exists(x, P(x)))

sat = tf.train.GradientDescentOptimizer(0.01).minimize(
    -tf.concat([formula, w1_formula1, w2_formula2], axis=0))
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(100):
        sess.run(sat)
        if i % 10 == 0:
            print(sess.run(formula))
    print(sess.run([a, b, c]))
    print(sess.run(And(a, P(x))))
p_type = args["p"]
""" DATASET """

ds_train, ds_test = data.get_mnist_op_dataset(
    count_train=n_examples_train,
    count_test=n_examples_test,
    buffer_size=10000,
    batch_size=batch_size,
    n_operands=4,
    op=lambda args: 10 * args[0] + args[1] + 10 * args[2] + args[3])
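For clarity, this op labels each 4-tuple of digit operands with the sum of two two-digit numbers:

# e.g. the operands (3, 4, 2, 5) encode 34 + 25, so the label is 59
op = lambda args: 10 * args[0] + args[1] + 10 * args[2] + args[3]
assert op((3, 4, 2, 5)) == 59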
""" LTN MODEL AND LOSS """
### Predicates
logits_model = baselines.SingleDigit()
Digit = ltn.Predicate(ltn.utils.LogitsToPredicateModel(logits_model))
### Variables
d1 = ltn.variable("digits1", range(10))
d2 = ltn.variable("digits2", range(10))
d3 = ltn.variable("digits3", range(10))
d4 = ltn.variable("digits4", range(10))
### Operators
Not = ltn.Wrapper_Connective(ltn.fuzzy_ops.Not_Std())
And = ltn.Wrapper_Connective(ltn.fuzzy_ops.And_Prod())
Or = ltn.Wrapper_Connective(ltn.fuzzy_ops.Or_ProbSum())
Implies = ltn.Wrapper_Connective(ltn.fuzzy_ops.Implies_Reichenbach())
Forall = ltn.Wrapper_Quantifier(ltn.fuzzy_ops.Aggreg_pMeanError(),
                                semantics="forall")
Exists = ltn.Wrapper_Quantifier(ltn.fuzzy_ops.Aggreg_pMean(),
                                semantics="exists")


### Axioms
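The axiom block is cut off here. A plausible sketch of what it would contain, following the pattern of the single-digit addition snippet further down this page; the diag/mask arguments are assumptions, not the original code:

@tf.function
def axioms(images_x1, images_x2, images_y1, images_y2, labels_z,
           p_schedule=tf.constant(2.)):
    images_x1 = ltn.variable("x1", images_x1)
    images_x2 = ltn.variable("x2", images_x2)
    images_y1 = ltn.variable("y1", images_y1)
    images_y2 = ltn.variable("y2", images_y2)
    labels_z = ltn.variable("z", labels_z)
    # forall examples: some digit assignment d1..d4 explains the label
    return Forall(
        ltn.diag(images_x1, images_x2, images_y1, images_y2, labels_z),
        Exists(
            (d1, d2, d3, d4),
            And(And(Digit([images_x1, d1]), Digit([images_x2, d2])),
                And(Digit([images_y1, d3]), Digit([images_y2, d4]))),
            mask_vars=[d1, d2, d3, d4, labels_z],
            mask_fn=lambda vars: tf.equal(
                10 * vars[0] + vars[1] + 10 * vars[2] + vars[3], vars[4]),
            p=p_schedule),
        p=2)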
Example #5
def phi3(features):
    x = ltn.variable("x", features)
    return Forall(x, Implies(p([x, class_blue]), p([x, class_male])), p=5)
Example #6
def phi1(features):
    x = ltn.variable("x", features)
    return Forall(x,
                  Implies(p([x, class_blue]), Not(p([x, class_orange]))),
                  p=5)
""" DATASET """

ds_train, ds_test = data.get_mnist_op_dataset(
    count_train=n_examples_train,
    count_test=n_examples_test,
    buffer_size=10000,
    batch_size=batch_size,
    n_operands=2,
    op=lambda args: args[0] + args[1])

""" LTN MODEL AND LOSS """
### Predicates
logits_model = baselines.SingleDigit()
Digit = ltn.Predicate(ltn.utils.LogitsToPredicateModel(logits_model))
### Variables
d1 = ltn.variable("digits1", range(10))
d2 = ltn.variable("digits2", range(10))
### Operators
Not = ltn.Wrapper_Connective(ltn.fuzzy_ops.Not_Std())
And = ltn.Wrapper_Connective(ltn.fuzzy_ops.And_Prod())
Or = ltn.Wrapper_Connective(ltn.fuzzy_ops.Or_ProbSum())
Implies = ltn.Wrapper_Connective(ltn.fuzzy_ops.Implies_Reichenbach())
Forall = ltn.Wrapper_Quantifier(ltn.fuzzy_ops.Aggreg_pMeanError(),
                                semantics="forall")
Exists = ltn.Wrapper_Quantifier(ltn.fuzzy_ops.Aggreg_pMean(),
                                semantics="exists")
### Axioms
@tf.function
def axioms(images_x, images_y, labels_z, p_schedule=tf.constant(2.)):
    images_x = ltn.variable("x", images_x)
    images_y = ltn.variable("y", images_y)
    labels_z = ltn.variable("z", labels_z)
    # the continuation below is reconstructed; the original snippet is
    # truncated at this point
    return Forall(
        ltn.diag(images_x, images_y, labels_z),
        Exists(
            (d1, d2),
            And(Digit([images_x, d1]), Digit([images_y, d2])),
            mask_vars=[d1, d2, labels_z],
            mask_fn=lambda vars: tf.equal(vars[0] + vars[1], vars[2]),
            p=p_schedule),
        p=2)
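A typical training step for such an axioms function, sketched under the assumption that ds_train yields (images_x, images_y, labels_z) batches and that the returned satisfiability is a scalar in [0, 1]:

optimizer = tf.keras.optimizers.Adam(0.001)
for images_x, images_y, labels_z in ds_train:
    with tf.GradientTape() as tape:
        # maximize satisfiability = minimize 1 - sat
        loss = 1. - axioms(images_x, images_y, labels_z)
    grads = tape.gradient(loss, logits_model.trainable_variables)
    optimizer.apply_gradients(zip(grads, logits_model.trainable_variables))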
non_contain_data = np.squeeze(non_contain_data)

non_in_data = np.array([
    np.concatenate([bbs1[i], bbs2[i]], axis=0) for i in range(nr_of_bb)
    if is_not_in(bbs1[i], bbs2[i])
])
non_in_data = np.squeeze(non_in_data)

# end of data generation

# start the definition of the language:

# variables for pairs of rectangles ....

# ... for positive examples of every relation
lxy = ltn.variable("left_xy", tf.cast(left_data, tf.float32))
rxy = ltn.variable("right_xy", tf.cast(right_data, tf.float32))
bxy = ltn.variable("below_xy", tf.cast(below_data, tf.float32))
axy = ltn.variable("above_xy", tf.cast(above_data, tf.float32))
cxy = ltn.variable("contains_xy", tf.cast(contain_data, tf.float32))
ixy = ltn.variable("in_xy", tf.cast(in_data, tf.float32))

# ... for negative examples (placeholders that are refilled with randomly
# generated data every 100 training epochs)

nlxy = ltn.variable("not_left_xy", 8)
nrxy = ltn.variable("not_right_xy", 8)
nbxy = ltn.variable("not_below_xy", 8)
naxy = ltn.variable("not_above_xy", 8)
ncxy = ltn.variable("not_conts_xy", 8)
nixy = ltn.variable("not_is_in_xy", 8)
def phi2():
    p = ltn.variable("p", tf.stack(list(g.values())))
    q = ltn.variable("q", tf.stack(list(g.values())))
    return Forall((p, q),
                  Implies(Or(Cancer(p), Cancer(q)), Friends([p, q])),
                  p=5)
def phi1():
    p = ltn.variable("p", tf.stack(list(g.values())))
    return Forall(p, Implies(Cancer(p), Smokes(p)), p=5)
Example #11
closed_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(i, len(data))
    if np.sum(np.square(data[i] - data[j])) < np.square(.5)
])

closed_data = closed_data[np.random.randint(0, len(closed_data), 1000)]
distant_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) > np.square(1.)
])

# defining the language

x = ltn.variable("x", data)
y = ltn.variable("y", data)
closed_x_y = ltn.variable("closed_x_y", closed_data)
distant_x_y = ltn.variable("distant_x_y", distant_data)

C = {i: ltn.predicate("C_" + str(i), x) for i in clst_ids}

first = ltn.function("first", closed_x_y, fun_definition=lambda d: d[:, :2])
second = ltn.function("second", closed_x_y, fun_definition=lambda d: d[:, 2:])

print("defining the theory T")
T = tf.reduce_mean(
    tf.concat(
        [Forall(x, Or(*[C[i](x) for i in clst_ids]))] +
        [Exists(x, C[i](x)) for i in clst_ids] + [
            # the continuation below is reconstructed; the original snippet
            # is truncated at this point
            Forall(closed_x_y,
                   Equiv(C[i](first(closed_x_y)), C[i](second(closed_x_y))))
            for i in clst_ids
        ] + [
            Forall(distant_x_y,
                   Not(And(C[i](first(distant_x_y)),
                           C[i](second(distant_x_y)))))
            for i in clst_ids
        ],
        axis=0))
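A minimal training sketch for this theory, assuming the same TF1 session pattern used by the other examples on this page:

opt = tf.train.GradientDescentOptimizer(0.01).minimize(-T)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(1000):
        sess.run(opt)
        if i % 100 == 0:
            print(i, "sat level:", sess.run(T))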
logictensornetworks.operators.set_universal_aggreg("mean")

size = 20
g1 = {l: ltn.Constant(l, min_value=[0.] * size, max_value=[1.] * size).ground
      for l in 'abcdefgh'}
g2 = {l: ltn.Constant(l, min_value=[0.] * size, max_value=[1.] * size).ground
      for l in 'ijklmn'}
g = {**g1, **g2}


friends = [('a', 'b'), ('a', 'e'), ('a', 'f'), ('a', 'g'), ('b', 'c'),
           ('c', 'd'), ('e', 'f'), ('g', 'h'), ('i', 'j'), ('j', 'm'),
           ('k', 'l'), ('m', 'n')]
smokes = ['a', 'e', 'f', 'g', 'j', 'n']
cancer = ['a','e']


p = ltn.variable("p", tf.concat(list(g.values()), axis=0))
q = ltn.variable("q", tf.concat(list(g.values()), axis=0))

p1 = ltn.variable("p1", tf.concat(list(g1.values()), axis=0))
q1 = ltn.variable("q1", tf.concat(list(g1.values()), axis=0))

p2 = ltn.variable("p2", tf.concat(list(g2.values()), axis=0))
q2 = ltn.variable("q2", tf.concat(list(g2.values()), axis=0))

Friends = ltn.Predicate('Friends', size * 2).ground
Smokes = ltn.Predicate('Smokes', size).ground
Cancer = ltn.Predicate('Cancer', size).ground


facts = [Friends(g[x], g[y]) for (x, y) in friends] + \
        [Not(Friends(g[x], g[y])) for x in g1 for y in g1
         # the condition below is reconstructed; the original snippet is
         # truncated at this point
         if (x, y) not in friends and x < y]

# The head of the following snippet was truncated as well; a plausible
# reconstruction mirroring all_words_data below (the name
# non_location_words_data is assumed)
non_location_words_data = np.array([
    get_embedding_data(w, w2v_model)[0] for s in train_combinations
    for w in s.split() if not is_location(w, s)
])

all_words_data = np.array([
    get_embedding_data(w, w2v_model)[0] for s in train_combinations
    for w in s.split()
])

# end of data generation

# start the definition of the language:

# variables for word embeddings ....

# ... for positive examples of every relation
act = ltn.variable("actor_data", tf.cast(actor_data, tf.float32))
mov = ltn.variable("movement_data", tf.cast(movement_data, tf.float32))
loc = ltn.variable("location_data", tf.cast(location_data, tf.float32))

# ... for negative examples (placeholders that are refilled with randomly
# generated data every 100 training epochs)

nact = ltn.variable("not_actor_data", emb_size)
nmov = ltn.variable("not_movement_data", emb_size)
nloc = ltn.variable("not_location_data", emb_size)

# printing out the dimensions of examples
pxy = [act, mov, loc]
npxy = [nact, nmov, nloc]

for xy in pxy:
    # loop body reconstructed; the original snippet is truncated here
    print(xy.shape)
Example #14
import logictensornetworks as ltn
from logictensornetworks import And, Not, Equiv, Forall, Implies
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# loading data

data = np.random.uniform(
    [-1, -1],
    [1, 1],
    (500, 2),
).astype(np.float32)

# defining the language

x = ltn.variable("x", data)
y = ltn.variable("y", data)

a = ltn.constant("a", [0.5, 0.5])
b = ltn.constant("b", [-0.5, -0.5])

A = ltn.predicate("A", 2)
B = ltn.predicate("B", 2)
T = And(A(a), B(b), Not(A(b)), Forall(x, Implies(A(x), B(x))))

# start a tensorflow session

sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
opt = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(-T)
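The example stops right after building the optimizer; a plausible continuation in the same style:

for i in range(1000):
    sess.run(opt)
    if i % 100 == 0:
        print(i, "sat level:", sess.run(T))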
Example #15
def axioms(x_data, y_data):
    x = ltn.variable("x", x_data)
    y = ltn.variable("y", y_data)
    return Forall(ltn.diag(x, y), eq([f(x), y]))
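ltn.diag pairs the i-th element of x with the i-th element of y, so the Forall ranges over corresponding pairs rather than the full cross product. A usage sketch with hypothetical data (assumes numpy as np plus the eq and f of the surrounding example):

import numpy as np

x_data = np.random.uniform(-1., 1., (100, 1)).astype(np.float32)
y_data = 2. * x_data  # hypothetical ground truth
print(axioms(x_data, y_data))  # satisfiability of: forall i, eq(f(x_i), y_i)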
Example #16
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(
        labels=tf.reshape(y_, (-1, embedding_size)),
        logits=tf.reshape(y_conv_before_softmax[:train_batch_size],
                          (-1, embedding_size))))
correct_prediction = tf.equal(
    tf.argmax(tf.reshape(y_conv, (-1, embedding_size)), axis=1),
    tf.argmax(tf.reshape(y_, (-1, embedding_size)), axis=1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

# end of the MNIST part

# start of the LTN part

dd = ltn.variable("double_digit", y_conv)


def get_nth_element(n):
    def result(p):
        return p[:, n]

    return result


IS1 = {
    n: ltn.predicate("is_equal_to_" + str(n),
                     embedding_size * 2,
                     pred_definition=get_nth_element(n))
    for n in range(10)
}
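Each IS1[n] reads the n-th component of the double-digit tensor as the truth degree that the first digit equals n; a short, hypothetical query:

# truth degree, per batch element, that the first digit is 3
is_three = IS1[3](dd)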