Example #1
# Imports assume pykg2vec's package layout; module paths may differ across versions.
import sys

from pykg2vec.common import KGETuneArgParser
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer


def main():
    # Get the customized configuration from the command-line arguments.
    args = KGETuneArgParser().get_args(sys.argv[1:])

    # Initialize the Bayesian optimizer; this also prepares the data.
    bays_opt = BaysOptimizer(args=args)

    # Perform the golden hyperparameter tuning.
    bays_opt.optimize()
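Run as a script, the model and dataset are chosen with the -mn and -ds flags (the same flags Example 5 passes to get_args directly); a minimal entry-point sketch, with the script name assumed:

if __name__ == "__main__":
    main()  # e.g. python tune_model.py -mn transe -ds Freebase15k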
Example #2
# Imports assume pykg2vec's package layout; module paths may differ across versions.
from pykg2vec.common import HyperparamterLoader, KGETuneArgParser  # sic: the library's own spelling
from pykg2vec.data.kgcontroller import KnowledgeGraph


def test_hyperparamter_loader(model_name):
    # Prepare the dataset and cache it for later use.
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.prepare_data()

    # Get the default configuration (no command-line overrides).
    args = KGETuneArgParser().get_args([])

    # Load the curated ("golden") hyperparameters for this dataset/model pair.
    hyperparams = HyperparamterLoader(args).load_hyperparameter("freebase15k", model_name)

    assert hyperparams["optimizer"] is not None
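Because test_hyperparamter_loader takes model_name as an argument, the suite presumably drives it with pytest parametrization (the same pattern would apply to tunning_function in Example 3); a minimal sketch, with the listed model names assumed:

import pytest

@pytest.mark.parametrize("model_name", ["transe", "complex", "distmult"])
def test_hyperparamter_loader(model_name):
    ...  # body as in Example 2 above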
Example #3
# Imports assume pykg2vec's package layout; module paths may differ across versions.
from pykg2vec.common import KGETuneArgParser
from pykg2vec.data.kgcontroller import KnowledgeGraph
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer


def tunning_function(name):
    """Test hyperparameter tuning for a single model."""
    # Prepare the dataset and cache it for later use.
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.prepare_data()

    # Get the default configuration (no command-line overrides).
    args = KGETuneArgParser().get_args([])

    # Run in debug mode and select the model under test.
    args.debug = True
    args.model = name

    # Initialize the Bayesian optimizer and cap the number of evaluation triples.
    bays_opt = BaysOptimizer(args=args)
    bays_opt.config_local.test_num = 10

    # Perform the golden hyperparameter tuning.
    bays_opt.optimize()

    assert bays_opt.return_best() is not None
Example #4
# Imports assume pykg2vec's package layout; module paths may differ across versions.
from unittest.mock import patch

import pytest

from pykg2vec.common import KGETuneArgParser
from pykg2vec.data.kgcontroller import KnowledgeGraph
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer


# mocked_fmin stands in for hyperopt's fmin inside the optimizer module;
# the exact patch target is an assumption.
@patch("pykg2vec.utils.bayesian_optimizer.fmin")
def test_return_empty_before_optimization(mocked_fmin):
    """return_best() must raise if optimize() has not been called."""
    # Prepare the dataset and cache it for later use.
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.prepare_data()

    # Get the default configuration (no command-line overrides).
    args = KGETuneArgParser().get_args([])

    # Run in debug mode and select a model.
    args.debug = True
    args.model = 'analogy'

    # Initialize the optimizer but deliberately skip optimize().
    bays_opt = BaysOptimizer(args=args)
    bays_opt.config_local.test_num = 10

    with pytest.raises(Exception) as e:
        bays_opt.return_best()

    # fmin was never invoked, and return_best() raised the expected message.
    assert mocked_fmin.called is False
    assert e.value.args[0] == 'Cannot find golden setting. Has optimize() been called?'
Example #5
# Imports assume pykg2vec's package layout; module paths may differ across versions.
from pykg2vec.common import Importer, KGEArgParser, KGETuneArgParser
from pykg2vec.data.kgcontroller import KnowledgeGraph
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer
from pykg2vec.utils.trainer import Trainer


def main():
    model_name = "transe"
    dataset_name = "Freebase15k"

    # 1. Tune the hyperparameters for the selected model and dataset.
    #    (Tuning uses the training and validation sets.)
    args = KGETuneArgParser().get_args(
        ['-mn', model_name, '-ds', dataset_name])

    # Initialize the Bayesian optimizer and prepare the data.
    bays_opt = BaysOptimizer(args=args)

    # Perform the golden hyperparameter tuning; return_best() yields a dict
    # mapping hyperparameter names to the best values found.
    bays_opt.optimize()
    best = bays_opt.return_best()

    # 2. Evaluate the final model on the test set using the best hyperparameters found.
    args = KGEArgParser().get_args(['-mn', model_name, '-ds', dataset_name])

    # Prepare the data and cache it for later use.
    knowledge_graph = KnowledgeGraph(dataset=args.dataset_name)
    knowledge_graph.prepare_data()

    # Extract the corresponding model config and definition via Importer().
    config_def, model_def = Importer().import_model_config(
        args.model_name.lower())
    config = config_def(args)

    # Overwrite the config params with the golden hyperparameters.
    for k, v in best.items():
        config.__dict__[k] = v
    model = model_def(**config.__dict__)

    # Create, compile, and train the model.
    trainer = Trainer(model, config)
    trainer.build_model()
    trainer.train_model()
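Once trained, the same Trainer exposes link-prediction helpers in recent pykg2vec releases; a minimal sketch, assuming the infer_tails API and illustrative entity/relation ids:

# Top-k tail predictions for a (head, relation) pair; the ids 1 and 10 are placeholders.
trainer.infer_tails(1, 10, topk=5)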