])

# Third, we make a `SimpleExperiment` — note that the `objective_name` needs to be one of the metric names returned by the evaluation function.

exp = SimpleExperiment(
    name="test_branin",
    search_space=search_space,
    evaluation_function=branin,
    objective_name="branin",  # must match the metric name returned by the evaluation function
    minimize=True,
)

# We use the Sobol generator to create 5 (quasi-)random initial points in the search space. Calling `new_batch_trial` will cause Ax to evaluate the underlying `branin` function at the generated points and automatically keep track of the results.

sobol = get_sobol(exp.search_space)
exp.new_batch_trial(generator_run=sobol.gen(5))
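
# If you want to sanity-check the initialization, the generated arms are attached to the experiment's first trial; a quick sketch using standard Ax accessors:

# Print the five Sobol points that were just attached to the experiment.
for arm in exp.trials[0].arms:
    print(arm.name, arm.parameters)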

# To run our custom botorch model inside the Ax optimization loop, we can use the `get_botorch` factory function from `ax.modelbridge.factory`. Any keyword arguments given to this function are passed through to the `BotorchModel` constructor. To use our custom model, we just need to pass our newly minted `_get_and_fit_simple_custom_gp` function to `get_botorch` using the `model_constructor` argument.
#
# **Note:** `get_botorch` by default automatically applies a number of parameter transformations (e.g. normalizing input data or standardizing output data). This is typically what you want for standard use cases with continuous parameters. If your model expects raw parameters, make sure to pass in `transforms=[]` to prevent any transformations from taking place. See the [Ax documentation](https://ax.dev/docs/models.html#transforms) for additional information on how transformations in Ax work.
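
# For example, fitting the custom model on raw parameter values would look like the sketch below; apart from `transforms=[]`, it mirrors the factory call used in the optimization loop that follows.

from ax.modelbridge.factory import get_botorch

model_raw = get_botorch(
    experiment=exp,
    data=exp.eval(),
    search_space=exp.search_space,
    model_constructor=_get_and_fit_simple_custom_gp,
    transforms=[],  # the model sees raw, untransformed parameter values
)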

# #### Run the optimization loop
#
# We're ready to run the Bayesian Optimization loop.

for i in range(5):
    print(f"Running optimization batch {i+1}/5...")
    # Fit the custom model on all data so far and generate the next candidate
    model = get_botorch(
        experiment=exp,
        data=exp.eval(),
        search_space=exp.search_space,
        model_constructor=_get_and_fit_simple_custom_gp,
    )
    exp.new_trial(generator_run=model.gen(1))
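
# Once the loop finishes, all evaluations can be pulled out of the experiment for a quick sanity check; a minimal sketch (`eval` returns an Ax `Data` object whose `df` attribute is a pandas DataFrame):

data = exp.eval()
print(data.df.head())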
# #### A user-added real-world example
#
# The same pattern (Sobol initialization followed by model-based trials) carries over to real workloads. The user-added snippet below tunes a transformer's hyperparameters against an F1 metric, using the standard `get_GPEI` factory rather than a custom model. Here `dset` (a dataset path), `type` (the task name), `transformer_search_space`, `init_trials`, `opt_trials`, and the `TransformerNER`/`TransformerCLS` wrappers are assumed to be defined earlier in the script.

import os

from ax import SimpleExperiment, modelbridge

dset = dset.split('/')[-1]  # keep only the dataset name from its path
if type == 'ner':  # note: `type` is set earlier and shadows the builtin
    transformer = TransformerNER()
else:
    transformer = TransformerCLS()

# Create the experiment; the 'f1' objective is maximized (`minimize` defaults to False)
exp = SimpleExperiment(
    name='transformer',
    search_space=transformer_search_space,
    evaluation_function=transformer.trainer,
    objective_name='f1',
)
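
# For reference, `SimpleExperiment` expects its evaluation function to return a mapping from metric name to a `(mean, SEM)` tuple, so `transformer.trainer` presumably has a shape like the hypothetical sketch below (`train_and_evaluate` is a stand-in for the real training code, which lives elsewhere).

# Hypothetical shape of the evaluation function: train with the sampled
# hyperparameters and report F1 under the 'f1' metric name.
def trainer(parameterization, *args):
    f1_score = train_and_evaluate(**parameterization)  # assumed helper
    return {'f1': (f1_score, 0.0)}  # SEM of 0.0 treats the score as noiseless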

# Run the optimization: Sobol initialization first, then GP+EI fit on all data so far
sobol = modelbridge.get_sobol(search_space=exp.search_space)
print(f"\nRunning Sobol initialization trials...\n{'='*40}\n")
for _ in range(init_trials):
    exp.new_trial(generator_run=sobol.gen(1))

for i in range(opt_trials):
    print(
        f"\nRunning GP+EI optimization trial {i+1}/{opt_trials}...\n{'='*40}\n"
    )
    # Refit the GP on all data collected so far and generate one EI candidate
    gpei = modelbridge.get_GPEI(experiment=exp, data=exp.eval())
    exp.new_trial(generator_run=gpei.gen(1))

    # Save results every 5 trials
    if (i + 1) % 5 == 0:
        output_dir = os.path.join(
            'Ax_output', dset,