("joe", "jennifer"), ("janice", "juliet"), ("joe", "juliet"), ("janice", "joey"), ("joe", "joey")] ltnw.predicate("ancestor",embedding_size*2) ltnw.predicate("parent",embedding_size*2) for l in entities: ltnw.constant(l, min_value=[0.] * embedding_size, max_value=[1.] * embedding_size) for a,c in parents: string = "parent(" + a + "," + c + ")" logging.info(string) ltnw.axiom(string) ltnw.variable("a",tf.concat(list(ltnw.CONSTANTS.values()),axis=0)) ltnw.variable("b",tf.concat(list(ltnw.CONSTANTS.values()),axis=0)) ltnw.variable("c",tf.concat(list(ltnw.CONSTANTS.values()),axis=0)) ltnw.axiom("forall a,b: parent(a,b) -> ancestor(a,b)") ltnw.axiom("forall a,b,c: (ancestor(a,b) & parent(b,c)) -> ancestor(a,c)") ltnw.axiom("forall a: ~parent(a,a)") ltnw.axiom("forall a: ~ancestor(a,a)") ltnw.axiom("forall a,b: parent(a,b) -> ~parent(b,a)") ltnw.axiom("forall a,b: ancestor(a,b) -> ~ancestor(b,a)") ltnw.initialize_knowledgebase( optimizer=tf.train.RMSPropOptimizer(learning_rate=0.01, decay=0.9),
("Fish", "Animal"), ("Shark", "Fish"), ("Squirrel", "Mammal")) all_relationships = list(itertools.product(entities, repeat=2)) ltnw.predicate("SubClass", embedding_size * 2) for l in entities: ltnw.constant(l, min_value=[0.] * embedding_size, max_value=[1.] * embedding_size) for a, c in relationships: string = "SubClass(" + a + "," + c + ")" logging.info(string) ltnw.axiom(string) ltnw.variable("a", tf.concat(list(ltnw.CONSTANTS.values()), axis=0)) ltnw.variable("b", tf.concat(list(ltnw.CONSTANTS.values()), axis=0)) ltnw.variable("c", tf.concat(list(ltnw.CONSTANTS.values()), axis=0)) ltnw.axiom( "forall a,b,c: (SubClass(a,b) & SubClass(b,c)) -> SubClass(a,c)") ltnw.axiom("forall a: ~SubClass(a,a)") ltnw.axiom("forall a,b: SubClass(a,b) -> ~SubClass(b,a)") ltnw.initialize_knowledgebase( optimizer=tf.train.RMSPropOptimizer(learning_rate=lr, decay=dc), formula_aggregator=lambda *x: tf.reduce_min(tf.concat(x, axis=0))) # Train the KB
def _add_fact_axioms(frame, predicate):
    # Turn each labelled example row into a ground fact for the KB.
    # Rows with type == 1 are positive examples; anything else is asserted
    # as the negated atom ~predicate(first,second).
    # NOTE(review): assumes "first"/"second" hold entity names that were
    # registered as ltnw constants -- confirm against the data loader.
    for _, row in frame.iterrows():
        polarity = "" if row["type"] == 1 else "~"
        ltnw.axiom(polarity + predicate + "(" + row["first"] + "," + row["second"] + ")")


def _write_predictions(file_name, sat_level, frame, predicate, label):
    # Query the trained KB for every test pair in *frame* and write one
    # "first,second,truth_degree" line per pair to *file_name*.  The first
    # line of the file records the satisfiability level reached in training.
    with open(file_name, "w") as results_file:
        results_file.write(str(sat_level) + "\n")
        print("inferencing " + label)
        for _, row in frame.iterrows():
            query = predicate + "(" + row["first"] + "," + row["second"] + ")"
            results_file.write(row["first"] + "," + row["second"] + "," +
                               np.array_str(ltnw.ask(query).squeeze()) + "\n")


def ltnsnet():
    """Build, train and evaluate an LTN knowledge base of parent/ancestor facts.

    Declares the "parent" and "ancestor" predicates, grounds every entity as
    a learnable constant, asserts the labelled training examples plus the
    structural axioms (parent implies ancestor, transitivity over parent,
    irreflexivity, antisymmetry), trains for up to 10000 epochs, and dumps
    the truth degrees of the held-out test pairs to the files
    "an_prediction" and "pa_prediction".

    Relies on module-level globals: ltnw, embedding_size, entities,
    training_pa, training_an, test_pa and test_an.
    """
    # Both relations consume a pair of entity embeddings.
    ltnw.predicate("ancestor", embedding_size * 2)
    ltnw.predicate("parent", embedding_size * 2)
    # Every entity becomes a learnable point in [0,1]^embedding_size.
    for entity in entities:
        ltnw.constant(entity, min_value=[0.] * embedding_size, max_value=[1.] * embedding_size)

    # Ground facts from the labelled training data.
    _add_fact_axioms(training_pa, "parent")
    _add_fact_axioms(training_an, "ancestor")

    # Logical variables a/b/c each range over all grounded constants.
    for name in ("a", "b", "c"):
        ltnw.variable(name, tf.concat(list(ltnw.CONSTANTS.values()), axis=0))

    # Structural axioms relating the two relations.
    ltnw.axiom("forall a,b: parent(a,b) -> ancestor(a,b)")
    ltnw.axiom("forall a,b,c: (ancestor(a,b) & parent(b,c)) -> ancestor(a,c)")
    ltnw.axiom("forall a: ~parent(a,a)")
    ltnw.axiom("forall a: ~ancestor(a,a)")
    ltnw.axiom("forall a,b: parent(a,b) -> ~parent(b,a)")
    ltnw.axiom("forall a,b: ancestor(a,b) -> ~ancestor(b,a)")

    ltnw.initialize_knowledgebase(
        optimizer=tf.train.RMSPropOptimizer(learning_rate=0.01, decay=0.9),
        # KB satisfiability = minimum truth value over all formulas.
        formula_aggregator=lambda *x: tf.reduce_min(tf.concat(x, axis=0)))

    # Train the KB.
    sat_level = ltnw.train(max_epochs=10000)
    print(sat_level)

    _write_predictions("an_prediction", sat_level, test_an, "ancestor", "an")
    _write_predictions("pa_prediction", sat_level, test_pa, "parent", "pa")
# Fragment: build a synthetic KB with `pred` predicates of arity `card`
# over `const` constants (continues past this chunk).
# Candidate name pools; only the first `const` / `pred` entries are used.
predicates = list(map(lambda x: pred_name + str(x), range(1, 100)))
constants = list(map(lambda x: const_name + str(x), range(1, 100)))
# Ground the selected constants as learnable points in [0,1]^embedding_size.
for l in constants[:const]:
    print(l)
    ltnw.constant(l, min_value=[0.] * embedding_size, max_value=[1.] * embedding_size)
if card == 1:
    # Unary predicates: one variable ranging over all constants, and a
    # universally-quantified axiom per predicate.
    ltnw.variable("x", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    for k in predicates[:pred]:
        ltnw.predicate(k, embedding_size)
        string = "forall x : " + k + "(x)"
        print(string)
        ltnw.axiom(string)
elif card == 2:
    # Binary predicates: same pattern with two variables and doubled
    # predicate input size.
    ltnw.variable("x", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    ltnw.variable("y", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    for k in predicates[:pred]:
        ltnw.predicate(k, embedding_size * 2)
        string = "forall x,y : " + k + "(x,y)"
        print(string)
        ltnw.axiom(string)
else:
    # Ternary predicates (loop body continues past this chunk).
    ltnw.variable("x", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    ltnw.variable("y", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    ltnw.variable("z", tf.concat(list(ltnw.CONSTANTS.values()), axis=0))
    for k in predicates[:pred]:
        ltnw.predicate(k, embedding_size * 3)
        string = "forall x,y,z : " + k + "(x,y,z)"