Example #1
import sys

# Assumed imports (module paths vary across pykg2vec releases):
from pykg2vec.common import KGETuneArgParser
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer

def main():
    # getting the customized configurations from the command-line arguments.
    args = KGETuneArgParser().get_args(sys.argv[1:])

    # initializing bayesian optimizer and prepare data.
    bays_opt = BaysOptimizer(args=args)

    # perform the golden hyperparameter tuning.
    bays_opt.optimize()
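Because this entry point just forwards sys.argv to the parser, the same tuning run can be driven programmatically. A minimal sketch, assuming the -mn/-ds flags shown in Example #6 below:

def tune_programmatically():
    # hypothetical driver; flag names and values taken from Example #6
    args = KGETuneArgParser().get_args(['-mn', 'transe', '-ds', 'freebase15k'])
    BaysOptimizer(args=args).optimize()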
Example #2
# Assumed imports (module paths vary across pykg2vec releases):
from pykg2vec.common import KGETuneArgParser
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer

def tunning_function(name):
    """Function to test the tuning of the models."""
    # getting the customized configurations from the command-line arguments.
    args = KGETuneArgParser().get_args([])

    # initializing bayesian optimizer and prepare data.
    args.debug = True
    bays_opt = BaysOptimizer(args=args)

    # perform the golden hyperparameter tuning.
    bays_opt.optimize()
Example #3
    # Assumes module-level imports of KGETuneArgParser and BaysOptimizer
    # (their module paths vary across pykg2vec releases).
    def tune(self):
        """Function to tune the hyper-parameters for the model
        using the training and validation sets."""

        # getting the customized configurations from the command-line arguments.
        args = KGETuneArgParser().get_args([])
        args.model = self.model
        args.dataset_name = self.dataset
        args.debug = self.debug
        # initializing bayesian optimizer and prepare data.
        bays_opt = BaysOptimizer(args=args)

        # perform the golden hyperparameter tuning.
        bays_opt.optimize()

        self.best = bays_opt.return_best()
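Whatever return_best() hands back is consumed as a plain mapping of hyperparameter names to values (Example #6 below copies it into a config's __dict__). A minimal helper sketch, assuming the keys match config attribute names:

def apply_best(config, best):
    """Copy the golden hyperparameters onto a model config (cf. Example #6)."""
    for k, v in best.items():
        setattr(config, k, v)
    return config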
Example #4
# Assumed imports (module paths vary across pykg2vec releases):
from pykg2vec.common import KGETuneArgParser
from pykg2vec.data.kgcontroller import KnowledgeGraph
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer

def tunning_function(name):
    """Function to test the tuning of the models."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.prepare_data()

    # getting the customized configurations from the command-line arguments.
    args = KGETuneArgParser().get_args([])

    # initializing bayesian optimizer and prepare data.
    args.debug = True
    args.model = name

    bays_opt = BaysOptimizer(args=args)
    bays_opt.trainer.config.test_num = 10

    # perform the golden hyperparameter tuning.
    bays_opt.optimize()
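The test_num override caps how many test triples are evaluated, which keeps a debug run fast. Where that knob lives differs by snippet: this example reaches it through bays_opt.trainer.config, while Example #5 below uses bays_opt.config_local. A best-effort helper sketch, assuming one of those two attributes is present:

def cap_test_triples(bays_opt, n=10):
    """Cap evaluated test triples; tries config_local, then trainer.config."""
    cfg = getattr(bays_opt, "config_local", None)
    if cfg is None:
        cfg = bays_opt.trainer.config
    cfg.test_num = n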
Example #5
import pytest

# Assumed imports (module paths vary across pykg2vec releases):
from pykg2vec.common import KGETuneArgParser
from pykg2vec.data.kgcontroller import KnowledgeGraph
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer

# `mocked_fmin` is injected by patching the optimizer's underlying fmin
# call; see the sketch after this example.
def test_return_empty_before_optimization(mocked_fmin):
    """Ensure return_best() raises before optimize() has been called."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.prepare_data()

    # getting the customized configurations from the command-line arguments.
    args = KGETuneArgParser().get_args([])

    # initializing bayesian optimizer and prepare data.
    args.debug = True
    args.model = 'analogy'

    bays_opt = BaysOptimizer(args=args)
    bays_opt.config_local.test_num = 10

    with pytest.raises(Exception) as e:
        bays_opt.return_best()

    assert mocked_fmin.called is False
    assert e.value.args[0] == 'Cannot find golden setting. Has optimize() been called?'
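The mocked_fmin argument implies the test patches the optimizer's fmin call (pykg2vec's Bayesian search is apparently built on hyperopt) so that no real tuning runs. A sketch of how such a mock might be wired up, with the patch target below being an assumption:

from unittest import mock

@mock.patch('pykg2vec.utils.bayesian_optimizer.fmin')
def test_return_empty_before_optimization(mocked_fmin):
    ...  # body as in Example #5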
Example #6
# Assumed imports (module paths vary across pykg2vec releases):
from pykg2vec.common import Importer, KGEArgParser, KGETuneArgParser
from pykg2vec.data.kgcontroller import KnowledgeGraph
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer
from pykg2vec.utils.trainer import Trainer

def main():
    model_name = "transe"
    dataset_name = "Freebase15k"

    # 1. Tune the hyper-parameters for the selected model and dataset.
    # N.B. this uses the training and validation sets.
    args = KGETuneArgParser().get_args(
        ['-mn', model_name, '-ds', dataset_name])

    # initializing bayesian optimizer and prepare data.
    bays_opt = BaysOptimizer(args=args)

    # perform the golden hyperparameter tuning.
    bays_opt.optimize()
    best = bays_opt.return_best()

    # 2. Evaluate final model using the found best hyperparameters on testing set.
    args = KGEArgParser().get_args(['-mn', model_name, '-ds', dataset_name])

    # Preparing data and cache the data for later usage
    knowledge_graph = KnowledgeGraph(dataset=args.dataset_name)
    knowledge_graph.prepare_data()

    # Extracting the corresponding model config and definition from Importer().
    config_def, model_def = Importer().import_model_config(
        args.model_name.lower())
    config = config_def(args)

    # Update the config params with the golden hyperparameter
    for k, v in best.items():
        config.__dict__[k] = v
    model = model_def(**config.__dict__)

    # Create, Compile and Train the model.
    trainer = Trainer(model, config)
    trainer.build_model()
    trainer.train_model()
Example #7
# Assumed imports; this snippet matches a newer pykg2vec API in which
# KGEArgParser also carries the tuning options:
from pykg2vec.common import KGEArgParser
from pykg2vec.utils.bayesian_optimizer import BaysOptimizer

def main(cmd_args):
    args = KGEArgParser().get_args(cmd_args)

    bays_opt = BaysOptimizer(args=args)

    bays_opt.optimize()

def bayesian_optimization(args):
    bayes_opt = BaysOptimizer(args=args)
    bayes_opt.optimize()
    return bayes_opt.return_best()
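A minimal driver for the helper above, again assuming the newer-style API where KGEArgParser accepts the tuning options:

if __name__ == "__main__":
    import sys
    # run the search and print the golden hyperparameters it found
    golden = bayesian_optimization(KGEArgParser().get_args(sys.argv[1:]))
    print(golden)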