Example #1
    # example of an optimized configuration (truncated):
    #                            'random_normal_initializer_stdev': 0.00025797511482927632,
    #                            'rate_of_learning': 0.20172634121590136}

    # persist the optimized configuration to a file
    persist_results(
        optimized_configuration,
        optimized_config_directory + '/' + model_identifier + '.txt')

    # optimized_configuration = read_optimal_hyperparameter_values(optimized_config_directory + '/' + model_identifier + '.txt')

    # get the validation errors for the best hyperparameter configs
    smape_error, smape_error_list = train_model(optimized_configuration)

    # print(smape_error_list)

    # write the final list of validation errors to a file
    validation_errors_file = model_training_configs.VALIDATION_ERRORS_DIRECTORY + model_identifier + ".csv"
    with open(validation_errors_file, "w") as output:
        writer = csv.writer(output, lineterminator='\n')
        writer.writerow(smape_error_list)

    print("Optimized configuration: {}".format(optimized_configuration))
    print("Optimized Value: {}\n".format(smape_error))

    # test the model across 10 random seeds
    for i in range(1, 11):
        args.seed = i
        testing(args, optimized_configuration)

    # testing(args, optimized_configuration)
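
The snippet relies on two helpers, `persist_results` and `read_optimal_hyperparameter_values`, that are not shown. Below is a minimal sketch of what they might look like, assuming the configuration is a flat dict written as one `name:value` pair per line; the actual storage format in the repository may differ.

    def persist_results(configuration, file_path):
        # write each hyperparameter as "name:value", one pair per line
        with open(file_path, "w") as config_file:
            for name, value in configuration.items():
                config_file.write("{0}:{1}\n".format(name, value))

    def read_optimal_hyperparameter_values(file_path):
        # read the "name:value" lines back into a dict; values are parsed
        # as floats here for simplicity, although some (e.g. minibatch_size)
        # would be integers in practice
        configuration = {}
        with open(file_path) as config_file:
            for line in config_file:
                name, value = line.strip().split(":", 1)
                configuration[name] = float(value)
        return configuration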
Example #2
    # select the optimizer; fail fast on unknown names so optimizer_fn
    # is never left undefined
    if optimizer == "cocob":
        optimizer_fn = cocob_optimizer_fn
    elif optimizer == "adagrad":
        optimizer_fn = adagrad_optimizer_fn
    elif optimizer == "adam":
        optimizer_fn = adam_optimizer_fn
    else:
        raise ValueError("Unsupported optimizer: {}".format(optimizer))

    optimized_configuration = {
        'num_hidden_layers': optimized_params['num_hidden_layers'],
        'cell_dimension': optimized_params['cell_dimension'],
        'l2_regularization': optimized_params['l2_regularization'],
        'gaussian_noise_stdev': optimized_params['gaussian_noise_stdev'],
        'random_normal_initializer_stdev': optimized_params['random_normal_initializer_stdev'],
        'minibatch_size': optimized_params['minibatch_size'],
        'max_epoch_size': optimized_params['max_epoch_size'],
        'max_num_epochs': optimized_params['max_num_epochs'],
        # left empty, presumably because COCOB is learning-rate-free
        'learning_rate': ''
    }

    testing(args, optimized_configuration, "validation")
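
The empty `learning_rate` above is consistent with COCOB, which sets its step sizes via coin betting and takes no learning rate, while Adagrad and Adam do. Here is a minimal sketch of the three `*_optimizer_fn` factories, assuming a TensorFlow 1.x API; the COCOB import path is hypothetical and depends on which implementation the project vendors.

    import tensorflow as tf
    from external_packages import cocob_optimizer  # hypothetical location of a COCOB implementation

    def cocob_optimizer_fn(total_loss):
        # COCOB derives its step sizes internally, so no learning rate is passed
        return cocob_optimizer.COCOB().minimize(loss=total_loss)

    def adagrad_optimizer_fn(total_loss, learning_rate):
        return tf.train.AdagradOptimizer(learning_rate=learning_rate).minimize(loss=total_loss)

    def adam_optimizer_fn(total_loss, learning_rate):
        return tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss=total_loss)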
Example #3
        'seed': seed,
        'cell_type': cell_type,
        'without_stl_decomposition': without_stl_decomposition
    }

    model_trainer = StackingModelTrainer(**model_kwargs)

    # read the initial hyperparameter configurations from the file
    hyperparameter_values_dic = read_initial_hyperparameter_values(
        initial_hyperparameter_values_file)
    optimized_configuration = smac()

    # persist the optimized configuration to a file
    persist_results(
        optimized_configuration,
        optimized_config_directory + '/' + model_identifier + '.txt')

    # get the validation errors for the best hyperparameter configs
    smape_error, smape_error_list = train_model(optimized_configuration)

    # write the final list of validation errors to a file
    validation_errors_file = model_training_configs.VALIDATION_ERRORS_DIRECTORY + model_identifier + ".csv"
    with open(validation_errors_file, "w") as output:
        writer = csv.writer(output, lineterminator='\n')
        writer.writerow(smape_error_list)

    print("Optimized configuration: {}".format(optimized_configuration))
    print("Optimized Value: {}\n".format(smape_error))

    testing(args, optimized_configuration, "test")
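
`smac()` is called with no arguments, so it presumably closes over `hyperparameter_values_dic` and `train_model`. A minimal sketch of how it might wrap the SMAC3 library (0.x-era API); the hyperparameter bounds and run budget below are placeholder assumptions, not values from the repository.

    from smac.scenario.scenario import Scenario
    from smac.facade.smac_facade import SMAC
    from ConfigSpace import ConfigurationSpace
    from ConfigSpace.hyperparameters import UniformFloatHyperparameter, UniformIntegerHyperparameter

    def smac():
        # build the search space; real code would derive the bounds
        # from hyperparameter_values_dic instead of hard-coding them
        configuration_space = ConfigurationSpace()
        configuration_space.add_hyperparameter(
            UniformIntegerHyperparameter("num_hidden_layers", 1, 5, default_value=1))
        configuration_space.add_hyperparameter(
            UniformFloatHyperparameter("l2_regularization", 1e-4, 1e-1, default_value=1e-2))
        # ... the remaining hyperparameters are added analogously

        scenario = Scenario({
            "run_obj": "quality",      # optimize the objective value (mean SMAPE)
            "runcount-limit": 50,      # assumed evaluation budget
            "cs": configuration_space,
            "deterministic": "true"
        })

        # train_model returns (mean_smape, smape_list); SMAC minimizes the scalar
        def objective(configuration):
            return train_model(configuration.get_dictionary())[0]

        smac_runner = SMAC(scenario=scenario, tae_runner=objective)
        incumbent = smac_runner.optimize()
        return incumbent.get_dictionary()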