Example 1
import numpy as np
import pytest

# Import path as in AmpliGraph 1.x; it may differ in other versions.
from ampligraph.evaluation import filter_unseen_entities


def test_filter_unseen_entities_with_strict_mode():
    from collections import namedtuple
    base_model = namedtuple('test_model', 'ent_to_idx')

    X = np.array([['a', 'x', 'b'], ['c', 'y', 'd'], ['e', 'y', 'd']])

    model = base_model({'a': 1, 'b': 2, 'c': 3, 'd': 4})

    with pytest.raises(RuntimeError):
        _ = filter_unseen_entities(X, model, strict=True)
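With strict=True the call fails fast: the entity 'e' in the third triple is absent from model.ent_to_idx, so a RuntimeError is raised instead of data being silently dropped (only subjects and objects are checked; predicates such as 'x' and 'y' are not). A caller-side sketch of trapping this, reusing X and model from the test above; the exact exception message is version-dependent:

try:
    X_checked = filter_unseen_entities(X, model, strict=True)
except RuntimeError as err:
    # Fall back, log, or abort; here we simply report and re-raise.
    print('Unseen entities in the input triples:', err)
    raise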
Example 2
# Uses the same imports as Example 1.
def test_filter_unseen_entities_without_strict_mode():
    from collections import namedtuple
    base_model = namedtuple('test_model', 'ent_to_idx')

    X = np.array([['a', 'x', 'b'], ['c', 'y', 'd'], ['e', 'y', 'd']])

    model = base_model({'a': 1, 'b': 2, 'c': 3, 'd': 4})

    X_filtered = filter_unseen_entities(X, model, strict=False)

    X_expected = np.array([['a', 'x', 'b'], ['c', 'y', 'd']])

    np.testing.assert_array_equal(X_filtered, X_expected)
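The expected array pins down the non-strict contract: a triple survives only if both its subject (column 0) and its object (column 2) appear in model.ent_to_idx, so the row containing the unseen entity 'e' is dropped while predicates are never consulted. A minimal re-implementation sketch of exactly that contract (not AmpliGraph's actual code):

import numpy as np

def filter_unseen_entities_sketch(X, model, strict=True):
    # Keep a triple only if both its subject and its object are known entities.
    seen = set(model.ent_to_idx)
    mask = np.array([s in seen and o in seen for s, _, o in X])
    if strict and not mask.all():
        unseen = sorted(set(X[~mask][:, [0, 2]].ravel()) - seen)
        raise RuntimeError('Unseen entities: {}'.format(unseen))
    return X[mask]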
Example 3
import time

import tensorflow as tf
from scipy.special import expit

from ampligraph.latent_features import RandomBaseline
from ampligraph.utils import save_model, restore_model
# filter_unseen_entities sits alongside evaluate_performance in AmpliGraph 1.x;
# adjust the import path for other versions.
from ampligraph.evaluation import evaluate_performance, filter_unseen_entities

# train_X, test_X, positives_filter, start_time and log_file are assumed to be
# defined earlier in the tutorial this fragment comes from.

model = RandomBaseline(verbose=True)
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)  # show only ERROR-level TensorFlow messages (suppresses INFO and WARNING)
model.fit(train_X)
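# Note: tf.compat.v1.logging.set_verbosity() only filters Python-side log
# messages. The C++ backend's startup chatter is controlled separately by the
# TF_CPP_MIN_LOG_LEVEL environment variable, which must be set BEFORE
# TensorFlow is first imported, e.g.:
#
#   import os
#   os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'  # 2 = hide INFO and WARNING
#   import tensorflow as tf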
     
# Save the model at its best-performance point
save_model(model, 'best_ampliGraph_model.pkl')
del model  # delete the in-memory model
# Load the recently saved best-performing model
model = restore_model('./best_ampliGraph_model.pkl')
if model.is_fitted:
    print('The model is fit!')
else:
    print('The model is not fit! Did you skip a step?')

# EVALUATION: evaluate the model's performance
test_X = filter_unseen_entities(test_X, model, verbose=True, strict=False)
test_y = test_X[:, 1]  # predicate column of the filtered triples

# evaluate_performance returns the ranks of the test triples against corruptions
scores_validtn = evaluate_performance(test_X,
                                      model=model,
                                      filter_triples=positives_filter,  # corruption-strategy filter defined above
                                      use_default_protocol=True,  # corrupt subject and object separately while evaluating
                                      strict=False,
                                      verbose=True)
     
end_time = time.time()  # STOP: training-time tracker (note: the elapsed time also includes the evaluation above)
print("\nTraining Time:", end_time - start_time, "seconds")
print("Training Time:", end_time - start_time, "seconds", file=log_file)
 
pred_y_res = model.predict(test_X)  # raw triple scores
pred_y_proba = expit(pred_y_res)    # squash scores into (0, 1) with the logistic sigmoid
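The ranks returned by evaluate_performance are conventionally summarised with mean rank, mean reciprocal rank and Hits@N, all of which AmpliGraph provides; a short sketch, assuming scores_validtn holds those ranks. Note also that applying expit to raw scores is only a monotonic rescaling into (0, 1), not a calibrated probability; AmpliGraph 1.3+ offers model.calibrate() and model.predict_proba() for proper calibration.

from ampligraph.evaluation import mr_score, mrr_score, hits_at_n_score

print('MR:      %.2f' % mr_score(scores_validtn))
print('MRR:     %.4f' % mrr_score(scores_validtn))
print('Hits@10: %.4f' % hits_at_n_score(scores_validtn, n=10))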