Example No. 1
import logging

# FUNCTIONS and CONFIGURATION are module-level globals of the surrounding wrapper
def function(label, *args, **kwargs):
    # an argument-free call on a known label is a lookup, not a declaration
    if label in FUNCTIONS and args == () and kwargs == {}:
        return FUNCTIONS[label]
    elif label in FUNCTIONS and CONFIGURATION.get("error_on_redeclare"):
        logging.getLogger(__name__).error("Attempt at redeclaring existing function %s" % label)
        raise Exception("Attempt at redeclaring existing function %s" % label)
    else:
        if label in FUNCTIONS:
            # Logger.warn is a deprecated alias of Logger.warning
            logging.getLogger(__name__).warning("Redeclaring existing function %s" % label)
        FUNCTIONS[label] = ltn.function(label, *args, **kwargs)
        return FUNCTIONS[label]
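
A minimal sketch of how this wrapper behaves, assuming the surrounding module defines the FUNCTIONS and CONFIGURATION globals shown above and imports the LTN module as ltn (the call signature mirrors Example No. 3 below):

FUNCTIONS = {}
CONFIGURATION = {"error_on_redeclare": True}

f = function("f", 1, 1, fun_definition=lambda X: X)  # declares and caches "f"
assert function("f") is f  # an argument-free call returns the cached object
# function("f", 1, 1)      # would raise: redeclaration while error_on_redeclare is set
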
Example No. 2
# all ordered pairs (data[i], data[j]) whose squared Euclidean distance exceeds 1
distant_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) > np.square(1.)
])
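
# closed_data is used below but not built in this snippet; by symmetry with
# distant_data, a plausible reconstruction (an assumption, same threshold of 1.):
closed_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) < np.square(1.)
])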

# defining the language

x = ltn.variable("x", data)
y = ltn.variable("y", data)
closed_x_y = ltn.variable("closed_x_y", closed_data)
distant_x_y = ltn.variable("distant_x_y", distant_data)

# one unary predicate per cluster: C[i](x) reads "x belongs to cluster i"
C = {i: ltn.predicate("C_" + str(i), x) for i in clst_ids}

# project a concatenated pair back onto its two component points
first = ltn.function("first", closed_x_y, fun_definition=lambda d: d[:, :2])
second = ltn.function("second", closed_x_y, fun_definition=lambda d: d[:, 2:])
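
# quick plain-numpy check of the slicing the first/second lambdas perform
# (illustrative values; a concatenated pair d has shape (batch, 4)):
pair = np.concatenate([[0., 1.], [2., 3.]])[None, :]  # shape (1, 4)
assert (pair[:, :2] == [[0., 1.]]).all()  # the "first" point
assert (pair[:, 2:] == [[2., 3.]]).all()  # the "second" point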

print("defining the theory T")
T = tf.reduce_mean(
    tf.concat(
        # every point belongs to at least one cluster
        [Forall(x, Or(*[C[i](x) for i in clst_ids]))] +
        # every cluster is non-empty
        [Exists(x, C[i](x)) for i in clst_ids] + [
            # close points share cluster membership
            Forall(closed_x_y,
                   Equiv(C[i](first(closed_x_y)), C[i](second(closed_x_y))))
            for i in clst_ids
        ] + [
            # distant points do not share cluster membership
            Forall(distant_x_y,
                   Not(And(C[i](first(distant_x_y)),
                           C[i](second(distant_x_y)))))
            for i in clst_ids
        ],
        axis=0))
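
# the snippet stops at the theory definition; a minimal TF1-style training
# sketch to maximize T (optimizer and hyperparameters are illustrative assumptions):
opt = tf.train.RMSPropOptimizer(learning_rate=0.01).minimize(-T)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):
        sess.run(opt)
        if step % 100 == 0:
            print(step, "sat level ----->", sess.run(T))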

Example No. 3

# synthetic data for y = slope * x + noise; start, end, training_size, slope
# and var are hyperparameters defined elsewhere in the example
train_X = np.random.uniform(start, end, (training_size,)).astype("float32")
train_Y = slope * train_X + np.random.normal(scale=var, size=len(train_X))

# trainable parameters of the linear model f(X) = W * X + b
W = tf.Variable(np.random.randn(), name="weight")
b = tf.Variable(np.random.randn(), name="bias")


def apply_fun(X):
    return tf.add(tf.multiply(X, W), b)


# one ltn constant per training point
c_x = [ltn.constant("x_%s" % i, [x]) for i, x in enumerate(train_X)]
c_y = [ltn.constant("y_%s" % i, [y]) for i, y in enumerate(train_Y)]

# the learnable function f and a smooth equality predicate
f = ltn.function("f", 1, 1, fun_definition=apply_fun)
eq = ltn.predicate("equal", 2, lambda x, y: ltnl.equal_euclidian(x, y))

# one fact per training pair: f(x_i) should equal y_i
facts = [eq(f(x), y) for x, y in zip(c_x, c_y)]
# minimizing the negated mean truth value maximizes satisfiability
cost = -tf.reduce_mean(tf.stack(facts))

sess = tf.Session()
opt = tf.train.GradientDescentOptimizer(
    learning_rate=learning_rate).minimize(cost)

init = tf.global_variables_initializer()
sess.run(init)
for i in range(epochs):
    sess.run(opt)
    if i % 10 == 0:
        # cost is the negated satisfiability, so report its negation
        print(i, "sat level -----> ", -sess.run(cost))
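
After training, the fitted coefficients can be read back from the live session; a short follow-up sketch using the names defined above:

# inspect the learned linear model (sketch)
w_val, b_val = sess.run([W, b])
print("learned model: y = %.3f * x + %.3f" % (w_val, b_val))
sess.close()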