Example #1
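The module-level context is not shown in the snippet; a minimal assumed preamble (the import paths are guesses, and FUNCTIONS/CONFIGURATION are the registry and configuration dicts the function below relies on):

import logging

import numpy as np
import tensorflow as tf
import logictensornetworks as ltn
# Forall, Exists, And, Or, Not, Equiv used further down are assumed to be
# imported from the LTN wrapper module as well.

FUNCTIONS = {}                                 # label -> ltn.Function registry
CONFIGURATION = {"error_on_redeclare": False}  # toggles the error branch below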
def function(label, *args, **kwargs):
    """Return the registered ltn.Function for `label`, creating it on demand.

    Redeclaring an existing label either raises or merely warns, depending
    on CONFIGURATION["error_on_redeclare"].
    """
    if label in FUNCTIONS and args == () and kwargs == {}:
        return FUNCTIONS[label]
    elif label in FUNCTIONS and CONFIGURATION.get("error_on_redeclare"):
        logging.getLogger(__name__).error(
            "Attempt at redeclaring existing function %s", label)
        raise Exception("Attempt at redeclaring existing function %s" % label)
    else:
        if label in FUNCTIONS:
            logging.getLogger(__name__).warning(
                "Redeclaring existing function %s", label)
        FUNCTIONS[label] = ltn.Function(label, *args, **kwargs)
        return FUNCTIONS[label]
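
# `data` and `clst_ids` come from earlier in the example (not shown).
# A plausible definition of closed_data, mirroring distant_data below:
# all pairs of points within unit (squared Euclidean) distance.
closed_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) < np.square(1.)
])
# distant_data: all pairs of points farther apart than unit distance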
distant_data = np.array([
    np.concatenate([data[i], data[j]]) for i in range(len(data))
    for j in range(len(data))
    if np.sum(np.square(data[i] - data[j])) > np.square(1.)
])

# defining the language

x = ltn.variable("x", data)
y = ltn.variable("y", data)
closed_x_y = ltn.variable("closed_x_y", closed_data)
distant_x_y = ltn.variable("distant_x_y", distant_data)

C = {i: ltn.Predicate("C_" + str(i), x).ground for i in clst_ids}

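# first/second project a concatenated pair of points back onto its components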
first = ltn.Function("first", closed_x_y,
                     fun_definition=lambda d: d[:, :2]).ground
second = ltn.Function("second", closed_x_y,
                      fun_definition=lambda d: d[:, 2:]).ground

print("defining the theory T")
T = tf.reduce_mean(
    tf.concat(
        [Forall(x, Or(*[C[i](x) for i in clst_ids]))] +
        [Exists(x, C[i](x)) for i in clst_ids] + [
            Forall(closed_x_y,
                   Equiv(C[i](first(closed_x_y)), C[i](second(closed_x_y))))
            for i in clst_ids
        ] + [
            Forall(
                distant_x_y,
                # the source snippet was truncated here; completed on the
                # assumption that distant pairs must not share a cluster,
                # mirroring the closed_x_y axiom above
                Not(And(C[i](first(distant_x_y)),
                        C[i](second(distant_x_y)))))
            for i in clst_ids
        ],
        axis=0))
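
To round the example off, a minimal training sketch (assumed, not part of the original snippet): T is a truth value in [0, 1], so the theory's satisfiability can be maximized by gradient descent on -T.

opt = tf.train.GradientDescentOptimizer(0.01).minimize(-T)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(1000):
        sess.run(opt)  # each step increases the satisfiability of T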
Example #3
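The snippet omits its setup; an assumed preamble (import paths and hyperparameter values are guesses consistent with their use below):

import numpy as np
import tensorflow as tf
import logictensornetworks as ltn
import logictensornetworks_library as ltnl

learning_rate = 0.01
epochs = 1000
slope = 1.0
var = 0.1
train_X = np.random.uniform(0., 1., size=50).astype(np.float32)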
train_Y = slope * train_X + np.random.normal(scale=var, size=len(train_X))

W = tf.Variable(np.random.randn(), name="weight")
b = tf.Variable(np.random.randn(), name="bias")


def apply_fun(X):
    return tf.add(tf.multiply(X, W), b)


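# wrap every training sample as a grounded LTN constant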
c_x = [ltn.Constant("x_%s" % i, [x]).ground for i, x in enumerate(train_X)]
c_y = [ltn.Constant("y_%s" % i, [y]).ground for i, y in enumerate(train_Y)]

f = ltn.Function(  # estimates the linear regression f(x), i.e. maps x to y
    label="f",
    input_shape_spec=1,
    output_shape_spec=1,
    fun_definition=apply_fun).ground

eq = ltn.Predicate("equal", 2, ltnl.equal_euclidian).ground

facts = [eq(f(x), y) for x, y in zip(c_x, c_y)]  # truth of "f(x_i) equals y_i"
cost = -tf.reduce_mean(tf.stack(facts))          # maximize the mean truth value

sess = tf.Session()
opt = tf.train.GradientDescentOptimizer(
    learning_rate=learning_rate).minimize(cost)

init = tf.global_variables_initializer()
sess.run(init)
for i in range(epochs):
    sess.run(opt)  # the snippet was truncated here; one descent step per epoch
Example #4
# domain: the list of object-constant domains
# tensor: the column-wise concatenation of the cross product of the
#         individual tensors (14 x 60)

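# Assumed context (not shown in the snippet): `person` is a domain of object
# constants with number_of_features (= 30) columns, and `p1p2` is its
# column-wise cross product with itself, built via ltn.Domain_product
# (the exact constructor signatures are assumptions, hence left as comments).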
p1 = ltn.Domain_slice(p1p2, 0, number_of_features)
# Slice the Domain_product p1p2 into a Domain_slice with properties:
#   columns = 30 - 0 = 30
#   label   = "projection_of_cross_product_of.._from_col_0_to_30"
# i.e. the first half of p1p2

p2 = ltn.Domain_slice(p1p2, number_of_features, number_of_features * 2)
# i.e. the second half of p1p2

p2p1 = ltn.Domain_concat([p2, p1])
# p1p2 with the two person components swapped, i.e. each pair reversed

some_friend = ltn.Function("some_friend", person, person)
# Create a Function object some_friend with properties:
#   label      = "some_friend"
#   in_columns = 30
#   columns    = 30
#   family     = "linear"
#   domain     = person
#   M = 31 x 31 random values
#   N = 31 x 30 random values
#   parameters = N
# Evaluating tensor(person):
#   extended_domain = [14 x 1 ones | 14 x 30 domain tensor] => 14 x 31
#   result          = matmul(extended_domain, N)            => 14 x 30

some_friend_of_person = ltn.Term(some_friend, person)
# Create a Term object some_friend_of_person: the function applied to the
# person domain.
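# Evaluating its tensor applies some_friend row-wise to every person,
# yielding a 14 x 30 result (per the shapes annotated above).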