Example #1
# Fragment of a larger script: KelpieComplEx, KelpieMultiClassNLLptimizer, KelpieEvaluator,
# numpy, original_model, kelpie_dataset, args, original_sample, original_triple, head_id,
# relation_id, tail_id, original_test_samples and kelpie_test_samples are defined at module level.
def run_kelpie(train_samples):
    print("Wrapping the original model in a Kelpie explainable model...")
    # initialize the Kelpie model from the original model to be explained
    kelpie_model = KelpieComplEx(model=original_model, dataset=kelpie_dataset, init_size=1e-3)  # type: KelpieComplEx
    kelpie_model.to('cuda')

    ###########  BUILD THE OPTIMIZER AND RUN POST-TRAINING
    print("Running post-training on the Kelpie model...")
    optimizer = KelpieMultiClassNLLptimizer(model=kelpie_model,
                                            optimizer_name=args.optimizer,
                                            batch_size=args.batch_size,
                                            learning_rate=args.learning_rate,
                                            decay1=args.decay1,
                                            decay2=args.decay2,
                                            regularizer_name="N3",
                                            regularizer_weight=args.reg)

    optimizer.train(train_samples=train_samples, max_epochs=args.max_epochs)

    ###########  EXTRACT RESULTS

    print("\nExtracting results...")
    kelpie_entity_id = kelpie_dataset.kelpie_entity_id
    kelpie_sample_tuple = ((kelpie_entity_id, relation_id, tail_id) if args.perspective == "head"
                           else (head_id, relation_id, kelpie_entity_id))
    kelpie_sample = numpy.array(kelpie_sample_tuple)

    ### Evaluation on original entity

    # Kelpie model on original fact
    scores, ranks, predictions = kelpie_model.predict_sample(sample=original_sample, original_mode=True)
    original_direct_score, original_inverse_score = scores[0], scores[1]
    original_head_rank, original_tail_rank = ranks[0], ranks[1]
    print("\nKelpie model on the original test fact: <%s, %s, %s>" % original_triple)
    print("\tDirect fact score: %f; Inverse fact score: %f" % (original_direct_score, original_inverse_score))
    print("\tHead Rank: %f" % original_head_rank)
    print("\tTail Rank: %f" % original_tail_rank)

    # Kelpie model on all facts containing the original entity
    print("\nKelpie model on all test facts containing the original entity:")
    mrr, h1 = KelpieEvaluator(kelpie_model).eval(samples=original_test_samples, original_mode=True)
    print("\tMRR: %f\n\tH@1: %f" % (mrr, h1))


    ### Evaluation on kelpie entity

    # results on kelpie fact
    scores, ranks, _ = kelpie_model.predict_sample(sample=kelpie_sample, original_mode=False)
    kelpie_direct_score, kelpie_inverse_score = scores[0], scores[1]
    kelpie_head_rank, kelpie_tail_rank = ranks[0], ranks[1]
    print("\nKelpie model on the Kelpie test fact: <%s, %s, %s>" % kelpie_sample_tuple)
    print("\tDirect fact score: %f; Inverse fact score: %f" % (kelpie_direct_score, kelpie_inverse_score))
    print("\tHead Rank: %f" % kelpie_head_rank)
    print("\tTail Rank: %f" % kelpie_tail_rank)

    # results on all facts containing the kelpie entity
    print("\nKelpie model on all test facts containing the Kelpie entity:")
    mrr, h1 = KelpieEvaluator(kelpie_model).eval(samples=kelpie_test_samples, original_mode=False)
    print("\tMRR: %f\n\tH@1: %f" % (mrr, h1))

    return kelpie_direct_score, kelpie_inverse_score, kelpie_head_rank, kelpie_tail_rank
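
A minimal sketch of how run_kelpie might be invoked from the surrounding script. The argparse flags and their defaults are assumptions that merely mirror the args.* attributes the function reads; they are not Kelpie's documented CLI. kelpie_dataset is assumed to be the KelpieDataset built as in Example #2.

# Hypothetical driver for run_kelpie; the flags below only mirror the args.*
# attributes read inside the function and are NOT Kelpie's documented CLI.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--optimizer", default="Adagrad")
parser.add_argument("--batch_size", type=int, default=1000)
parser.add_argument("--learning_rate", type=float, default=0.1)
parser.add_argument("--decay1", type=float, default=0.9)
parser.add_argument("--decay2", type=float, default=0.999)
parser.add_argument("--reg", type=float, default=5e-2)
parser.add_argument("--max_epochs", type=int, default=50)
parser.add_argument("--perspective", choices=["head", "tail"], default="head")
args = parser.parse_args()

# kelpie_dataset is assumed to be the KelpieDataset built as in Example #2;
# post-training runs only on the facts that mention the entity being explained.
direct_score, inverse_score, head_rank, tail_rank = run_kelpie(
    train_samples=kelpie_dataset.kelpie_train_samples)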
Example #2
    # Fragment of a larger script: complex_dataset, original_model, original_sample,
    # original_entity_id, head_id, relation_id, tail_id, perspective, the optimizer
    # hyperparameters and the numpy import come from the enclosing scope.
    assert original_sample in complex_dataset.test_samples

    kelpie_dataset = KelpieDataset(dataset=complex_dataset,
                                   entity_id=original_entity_id)
    kelpie_entity_id = kelpie_dataset.kelpie_entity_id
    kelpie_triple = ((kelpie_entity_id, relation_id, tail_id) if perspective == 'head'
                     else (head_id, relation_id, kelpie_entity_id))
    kelpie_sample = numpy.array(kelpie_triple)

    print("Wrapping the original model in a Kelpie model...")
    kelpie_model = KelpieComplEx(dataset=kelpie_dataset,
                                 model=original_model,
                                 init_size=1e-3)  # type: KelpieComplEx
    kelpie_model.to('cuda')

    print("Running post-training on the Kelpie model...")
    optimizer = KelpieMultiClassNLLptimizer(
        model=kelpie_model,
        optimizer_name=optimizer_name,
        batch_size=batch_size,
        learning_rate=learning_rate,
        decay1=decay1,
        decay2=decay2,
        regularizer_name=regularizer_name,
        regularizer_weight=regularizer_weight)

    optimizer.train(train_samples=kelpie_dataset.kelpie_train_samples,
                    max_epochs=max_epochs)
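
Example #2 stops right after post-training. A plausible continuation, sketched only from the calls already used in Example #1 (it is not part of this snippet's source), would score the Kelpie fact with the post-trained model; as in Example #1, original_mode=False evaluates against the Kelpie entity rather than the original one.

    # Sketch of the evaluation step that usually follows (mirrors Example #1;
    # not taken from this snippet's original source).
    scores, ranks, _ = kelpie_model.predict_sample(sample=kelpie_sample,
                                                   original_mode=False)
    print("Direct fact score: %f; Inverse fact score: %f" % (scores[0], scores[1]))
    print("Head rank: %f; Tail rank: %f" % (ranks[0], ranks[1]))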