Example #1
import numpy as np

from shadho import spaces


def convert_config_to_shadho(config):
    """Convert an HPOBench config to a SHADHO search space.

    Parameters
    ----------
    config : dict or `hpobench.Configuration`
        HPOBench model configuration to translate.

    Returns
    -------
    space : dict or pyrameter.Specification
        The SHADHO translation of the HPOBench search space configuration.
    """
    # Create the shadho search space here and return it.
    space = {}

    for param in config.get_all_unconditional_hyperparameters():
        hp = config.get_hyperparameter(param)
        param_type = type(hp).__name__
        lower, upper, log = hp.lower, hp.upper, hp.log
        print(param, param_type, log)

        # TODO: Log-scaled domains (log=True) currently fall back to the
        # linear domains below, which breaks some tests that use log spaces.
        if param_type == 'UniformFloatHyperparameter':
            space[param] = spaces.uniform(np.float64(lower), np.float64(upper))
        elif param_type == 'UniformIntegerHyperparameter':
            space[param] = spaces.randint(int(lower), int(upper))
        else:
            raise TypeError(
                f'Unhandled HPOBench hyperparameter type {param_type}. '
                'Submit a bug report with the benchmark name and this message.'
            )

    return space
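
# A minimal usage sketch, assuming HPOBench is installed. The benchmark class,
# import path, and task_id below are illustrative and may differ between
# HPOBench versions.
from hpobench.benchmarks.ml.xgboost_benchmark import XGBoostBenchmark

benchmark = XGBoostBenchmark(task_id=167149)
shadho_space = convert_config_to_shadho(benchmark.get_configuration_space())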
Example #2
import math

# Import the driver and random search from SHADHO
from shadho import Shadho, spaces


# Define the function to optimize. It returns a single floating-point value
# to be minimized. Hyperparameters are passed in as a dictionary with the
# same structure as `space` below.
def sin(params):
    return math.sin(params['x'])


if __name__ == '__main__':
    # Set up the search space, in this case a uniform distribution over the
    # domain [0, pi]
    space = {'x': spaces.uniform(0, math.pi)}

    # Pass the `sin` function, the search space, and a timeout into the SHADHO
    # driver, and configure SHADHO to run locally.
    opt = Shadho('sin_local_example', sin, space, timeout=30)
    opt.config.manager = 'local'

    # Run SHADHO, and the optimal `x` value will be printed after 30s.
    opt.run()
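
# Because the params dict mirrors the structure of the search space, nested
# spaces work the same way. A minimal sketch (the names here are illustrative
# and not part of the original example):
nested_space = {
    'wave': {
        'x': spaces.uniform(0, math.pi),
        'amplitude': spaces.uniform(0.5, 2.0),
    }
}

def nested_sin(params):
    # `params` arrives with the same nesting as `nested_space` above.
    return params['wave']['amplitude'] * math.sin(params['wave']['x'])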
Example #3
    if args.master_name == '':
        parser.error('must provide a distinct master name')
    # TODO: Check that the provided paths are valid; e.g., raise a parser
    # error if args.output_results_path is invalid.

    return args


if __name__ == '__main__':
    args = parse_args()

    # Domains can be stored in variables and reused whenever the same domain
    # appears in multiple places in the search space.
    C = spaces.log2_uniform(-5, 15)
    gamma = spaces.log10_uniform(-3, 3)
    coef0 = spaces.uniform(-1000, 1000)

    # The search space in this case is hierarchical, with mutually exclusive
    # subspaces for each SVM kernel. The 'exclusive' tag instructs SHADHO to
    # select one of the subspaces from among 'linear', 'rbf', 'sigmoid', and
    # 'poly' at a time and only generate hyperparameters for that subspace.
    space = {
        'exclusive': True,
        'linear': {
            'kernel': 'linear',  # add the kernel name for convenience
            'C': C
        },
        'rbf': {
            'kernel': 'rbf',  # add the kernel name for convenience
            'C': C,
            'gamma': gamma
        },
        'sigmoid': {
            'kernel': 'sigmoid',  # add the kernel name for convenience
            'C': C,
            'gamma': gamma,
            'coef0': coef0
        },
        'poly': {
            'kernel': 'poly',  # add the kernel name for convenience
            'C': C,
            'gamma': gamma,
            'coef0': coef0,
            # The degree domain is assumed; it is not shown in the original.
            'degree': spaces.randint(2, 8)
        }
    }
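
    # A hedged sketch of an objective consuming one sampled kernel subspace.
    # It assumes the selected subspace arrives as a flat dict containing the
    # 'kernel' entry added above; the exact shape may differ by SHADHO version.
    from sklearn.datasets import load_digits
    from sklearn.model_selection import cross_val_score
    from sklearn.svm import SVC

    def svm_objective(params):
        X, y = load_digits(return_X_y=True)
        model = SVC(**params)  # 'kernel' plus its hyperparameters
        # SHADHO minimizes, so negate the cross-validated accuracy.
        return -cross_val_score(model, X, y, cv=3).mean()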
Example #4
# Search over some of the built-in initializers
initializers = spaces.choice(['zeros', 'ones', 'identity', 'glorot_normal',
                              'glorot_uniform', 'he_normal', 'he_uniform',
                              'random_normal', 'random_uniform'])

# Search over some of the built-in regularizers
regularizers = spaces.choice([None, 'l1', 'l2', 'l1_l2'])

# Search over some of the built-in constraints
constraints = spaces.choice([None, 'non_neg', 'unit_norm'])

# Search over the built-in activations, parameterizing where necessary
activations = spaces.scope(
    exclusive=True,
    elu='elu',
    hard_sigmoid='hard_sigmoid',
    leaky_relu=spaces.scope(alpha=spaces.uniform(0, 1)),
    prelu=spaces.scope(
        alpha_initializer=initializers,
        alpha_regularizer=regularizers,
        alpha_constraint=constraints),  # Keras PReLU expects 'alpha_constraint'
    relu='relu',
    sigmoid='sigmoid',
    softmax='softmax',
    softplus='softplus',
    softsign='softsign',
    tanh='tanh',
    thresholded_relu=spaces.scope(theta=spaces.uniform(-1, 1)))
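
# A hedged sketch (helper name and sample shapes assumed): build a Keras layer
# from one sampled activation config, which is assumed to arrive either as a
# bare string like 'relu' or a one-entry dict like {'leaky_relu': {'alpha': 0.3}}.
from tensorflow.keras import layers

def build_activation(sample):
    if isinstance(sample, str):
        return layers.Activation(sample)
    name, kwargs = next(iter(sample.items()))
    if name == 'leaky_relu':
        return layers.LeakyReLU(**kwargs)
    if name == 'prelu':
        return layers.PReLU(**kwargs)
    if name == 'thresholded_relu':
        return layers.ThresholdedReLU(**kwargs)
    return layers.Activation(name)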

# Set up a standard convolutional block that will search over all params that
# can be tuned for U-Net
conv = spaces.scope(
Example #5
    """
    x = params['x']
    y = params['y']

    return np.sin(x) * np.cos(y)


if __name__ == '__main__':
    # We set up the search space for the objective with two domains:
    #    x: a continuous uniform distribution over [0, 2*pi]
    #    y: a discrete set of 1000 evenly-spaced numbers in [0, 2*pi]
    #
    # Note that the dictionary passed to the objective retains the structure
    # defined here.
    search_space = {
        'x': spaces.uniform(0, 2 * math.pi),
        'y': spaces.choice(list(np.linspace(0, 2 * math.pi, 1000)))
    }

    # We next set up the optimizer, which will attempt to minimize the
    # objective locally. It takes an experiment key, the objective function,
    # the search space, a search method, and a timeout.

    opt = Shadho(
        'convex-tutorial',  # Name of this experiment
        objective,          # The function to optimize
        search_space,       # The search space to sample
        method='random',    # Sampling method: one of 'random', 'bayes', 'tpe', 'smac'
        timeout=30          # The time to run the search, in seconds
    )

    # Run the search locally (as in the earlier local example); the best
    # parameters found are reported when the search finishes.
    opt.config.manager = 'local'
    opt.run()
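
    # Quick sanity check of the analytic optimum (assuming minimization, as
    # described above): sin(x) * cos(y) attains its minimum of -1.0 on this
    # domain, e.g. at x = 3*pi/2, y = 0.
    print(np.sin(3 * np.pi / 2) * np.cos(0))  # -1.0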
Example #6
Prices dataset.

On top of setting up the search, this tutorial demonstrates the use of the
"exclusive" flag to split non-overlapping search spaces into separate trees.
"""

from shadho import Shadho, spaces

if __name__ == '__main__':

    # Set up the search space. In this case, we are searching over SVM kernel
    # hyperparameterizations. Because some spaces are used with multiple
    # kernels, we can create the spaces outside of the dictionary and use them
    # multiple times. SHADHO makes sure no aliasing occurs.

    C = spaces.uniform(-1000, 2000)
    gamma = spaces.log10_uniform(-5, 8)
    coef0 = spaces.uniform(-1000, 2000)
    degree = [2, 3, 4, 5, 6, 7]

    # The joint hyperparameter domains for each kernel should be searched
    # independently of one another, so we use the "exclusive" flag to tell
    # SHADHO to sample each subspace on its own.

    search_space = {
        'exclusive': True,
        'linear': {
            'C': C,
        },
        'rbf': {
            'C': C,
            'gamma': gamma,
        },
        'sigmoid': {
            'C': C,
            'gamma': gamma,
            'coef0': coef0,
        },
        'poly': {
            'C': C,
            'gamma': gamma,
            'coef0': coef0,
            # The degree list is assumed to be wrapped as a discrete domain.
            'degree': spaces.choice(degree),
        },
    }
Example #7
"""This script runs the hyperparameter search on remote workers.
"""

# These imports are the same as before.
from shadho import Shadho, spaces

import math

# The search space is also defined exactly as before.
space = {'x': spaces.uniform(0.0, 2.0 * math.pi)}

if __name__ == '__main__':
    # This time, instead of configuring shadho to run locally,
    # we direct it to the input files that run the optimization task.

    # Instead of the objective function, shadho is given a command that gets
    # run on the remote worker.
    opt = Shadho('shadho-wq-packaging-test',
                 'bash run_sin.sh',
                 space,
                 timeout=60)

    # Two input files are also added: the first is run directly by the worker
    # and can be used to set up your runtime environment (module load, anyone?)
    # The second is the script we're trying to optimize.
    opt.add_input_file('run_sin.sh')
    opt.add_input_file('sin.py')
    opt.run()
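
    # To supply compute, start one or more Work Queue workers pointed at this
    # master name (assuming CCTools is installed on the worker host), e.g.:
    #
    #     work_queue_worker -M shadho-wq-packaging-test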