Example #1
0
        ltnw.axiom(string)

    ltnw.variable("a",tf.concat(list(ltnw.CONSTANTS.values()),axis=0))
    ltnw.variable("b",tf.concat(list(ltnw.CONSTANTS.values()),axis=0))
    ltnw.variable("c",tf.concat(list(ltnw.CONSTANTS.values()),axis=0))

    ltnw.axiom("forall a,b: parent(a,b) -> ancestor(a,b)")
    ltnw.axiom("forall a,b,c: (ancestor(a,b) &  parent(b,c)) -> ancestor(a,c)")
    ltnw.axiom("forall a: ~parent(a,a)")
    ltnw.axiom("forall a: ~ancestor(a,a)")
    ltnw.axiom("forall a,b: parent(a,b) -> ~parent(b,a)")
    ltnw.axiom("forall a,b: ancestor(a,b) -> ~ancestor(b,a)")


    ltnw.initialize_knowledgebase(
        optimizer=tf.train.RMSPropOptimizer(learning_rate=0.01, decay=0.9),
        formula_aggregator=lambda *x: tf.reduce_min(tf.concat(x, axis=0)))


    # Train the KB
    sat_level = ltnw.train(max_epochs=10000)

    all_relationships = list(itertools.product(entities, repeat=2))

    file_name_an = "an_lr:" + str(lr) + "dc:" + str(dc) + "_em:" + str(embedding_size) + "_ly:" + str(ly) + "_bi:" + str(bi) + "_iter:" + str(iter_epoch) + "_univ:" + str(args.univ) + "_sat:" + np.array_str(sat_level)
    file_name_pa = "pa_lr:" + str(lr) + "dc:" + str(dc) + "_em:" + str(embedding_size) + "_ly:" + str(ly) + "_bi:" + str(bi) + "_iter:" + str(iter_epoch) + "_univ:" + str(args.univ) + "_sat:" + np.array_str(sat_level)

    with open(folder_name + file_name_an, "w") as resutls_file:
        resutls_file.write(str(sat_level) + "\n")
        logging.info("Inferencing Ancestors")
        for a,b in all_relationships:
Example #2
0
def _add_fact_axioms(predicate, examples):
    """Assert one ground atom per training example.

    ``examples`` is assumed to be a pandas DataFrame with string columns
    ``first`` and ``second`` and a numeric ``type`` column where 1 marks a
    positive example; any other value yields a negated atom — TODO confirm
    against the data-loading code, which is outside this file's view.
    """
    for _, row in examples.iterrows():
        polarity = "" if row["type"] == 1 else "~"
        ltnw.axiom(polarity + predicate + "(" + row["first"] + "," + row["second"] + ")")


def _write_predictions(file_name, predicate, examples, sat_level, banner):
    """Query ``predicate`` for every test pair and dump results to ``file_name``.

    The first output line is the final training sat level; each following
    line is ``first,second,truth_value`` where the truth value is the
    squeezed result of ``ltnw.ask``.
    """
    with open(file_name, "w") as results_file:
        results_file.write(str(sat_level) + "\n")
        print(banner)
        for _, row in examples.iterrows():
            query = predicate + "(" + row["first"] + "," + row["second"] + ")"
            truth = np.array_str(ltnw.ask(query).squeeze())
            results_file.write(row["first"] + "," + row["second"] + "," + truth + "\n")


def ltnsnet():
    """Build, train, and evaluate an LTN knowledge base for the
    parent/ancestor relations.

    Relies on module-level globals defined elsewhere in this file:
    ``embedding_size``, ``entities``, ``training_pa``, ``training_an``,
    ``test_pa``, ``test_an``, plus the ``ltnw`` wrapper and TF1-style ``tf``.

    Side effects: registers predicates/constants/axioms in ``ltnw``'s global
    state and writes prediction files ``an_prediction`` and ``pa_prediction``.
    """
    # Each predicate scores a pair of entity embeddings, hence size * 2.
    ltnw.predicate("ancestor", embedding_size * 2)
    ltnw.predicate("parent", embedding_size * 2)

    # One trainable constant (embedding) per entity, initialised in [0, 1].
    for entity in entities:
        ltnw.constant(entity,
                      min_value=[0.] * embedding_size,
                      max_value=[1.] * embedding_size)

    # Ground-truth facts from the training splits.
    _add_fact_axioms("parent", training_pa)
    _add_fact_axioms("ancestor", training_an)

    # Logical variables ranging over all known entity embeddings.
    # NOTE: each variable needs its own tf.concat tensor — ltnw tags the
    # tensor it receives, so the three calls must not share one object.
    ltnw.variable("a", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    ltnw.variable("b", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    ltnw.variable("c", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))

    # Background knowledge relating the two relations: parenthood implies
    # ancestry, ancestry is transitive through a parent step, and both
    # relations are irreflexive and asymmetric.
    ltnw.axiom("forall a,b: parent(a,b) -> ancestor(a,b)")
    ltnw.axiom("forall a,b,c: (ancestor(a,b) &  parent(b,c)) -> ancestor(a,c)")
    ltnw.axiom("forall a: ~parent(a,a)")
    ltnw.axiom("forall a: ~ancestor(a,a)")
    ltnw.axiom("forall a,b: parent(a,b) -> ~parent(b,a)")
    ltnw.axiom("forall a,b: ancestor(a,b) -> ~ancestor(b,a)")

    ltnw.initialize_knowledgebase(
        optimizer=tf.train.RMSPropOptimizer(learning_rate=0.01, decay=0.9),
        # Overall satisfiability is the minimum over all formula truth values.
        formula_aggregator=lambda *x: tf.reduce_min(tf.concat(x, axis=0)))

    # Train the KB and report the final satisfiability level reached.
    sat_level = ltnw.train(max_epochs=10000)

    print(sat_level)

    _write_predictions("an_prediction", "ancestor", test_an, sat_level,
                       "inferencing an")
    _write_predictions("pa_prediction", "parent", test_pa, sat_level,
                       "inferencing pa")