Example No. 1
On top of this, the code introduces the Object-Oriented interface for SHADHO
spaces, as well as repeating and dependent spaces.
"""

from shadho import Shadho, spaces


if __name__ == '__main__':
    # As a part of the architecture search, we are interested in optimizing
    # the number of layers, size/shape of each layer, activation function,
    # and whether or not to attach a batch normalization layer.

    # Like with the SVM example, search spaces can be defined once and reused
    # in multiple places.
    activations = ['glu', 'leaky_relu', 'prelu', 'relu', 'selu', 'sigmoid', 'tanh']
    batch_norm = spaces.log10_uniform(-4, 4)

    # For each convolutional layer, we sample over the number of convolutional
    # kernels, the kernel shape, activation function, and batch normalization.
    conv_layer = spaces.scope(
        out_filters=spaces.log2_randint(4, 10),
        kernel_shape=spaces.randint(1, 10, step=2),
        activation=activations,
        batch_norm=batch_norm
    )

    # Additionally, we do not want to worry about computing padding during model
    # construction. SHADHO offers *dependent* hyperparameter domains that
    # compute their value based on the value of another domain. The `padding`
    # domain here implements "same" padding.
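    # The definition of the dependent `padding` domain is not shown in this
    # excerpt. Purely as an illustration of the value such a domain would
    # compute (a hypothetical helper, not the SHADHO dependent-domain API),
    # "same" padding for a stride-1 convolution with an odd kernel size
    # follows directly from the sampled kernel shape:
    def same_padding(kernel_shape):
        # Preserves spatial size: output = input + 2 * padding - kernel + 1.
        return (kernel_shape - 1) // 2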
Example No. 2
conv = spaces.scope(
    kernel_size=spaces.randint(1, 12, 2),
    activation=activations,
    kernel_initializer=initializers,
    bias_initializer=initializers,
    kernel_regularizer=regularizers,
    bias_regularizer=regularizers,
    activity_regularizer=regularizers,
    kernel_constraint=constraints,
    bias_constraint=constraints)

# Search over the built-in optimizers, parameterizing SGD
optimizers = spaces.scope(
    exclusive=True,
    sgd=spaces.scope(
        lr=spaces.log10_uniform(-4, -1),
        momentum=spaces.uniform(0, 1),
        decay=spaces.log10_uniform(-4, -1)),
    rmsprop='rmsprop',
    adagrad='adagrad',
    adadelta='adadelta',
    adam='adam',
    adamax='adamax',
    nadam='nadam')
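
# Because the scope above is exclusive, each trial samples exactly one entry:
# either one of the fixed optimizer-name strings or the dict of SGD
# hyperparameters. The sketch below is a hypothetical helper showing how an
# objective function might turn that sampled value into a Keras optimizer
# (the exact payload layout SHADHO hands to the objective may differ, and
# learning-rate decay is omitted because its keyword varies across Keras
# versions):
from tensorflow.keras.optimizers import SGD

def build_optimizer(opt):
    if isinstance(opt, dict):  # the parameterized 'sgd' subspace was selected
        return SGD(learning_rate=opt['lr'], momentum=opt['momentum'])
    return opt  # a name such as 'adam'; Keras accepts optimizer names directly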

# Set up the full search space over the U-Net down- and upsampling blocks
space = spaces.scope(
    optimizer=optimizers,
    min_filters=spaces.log2_randint(5, 8),
    down1=spaces.scope(conv1=conv, conv2=conv),
    down2=spaces.scope(conv1=conv, conv2=conv),
Example No. 3
        args.result_file = './'
    if args.master_name == '':
        parser.error('must provide a distinct master name')
    # TODO: check that the provided paths are valid.
    # If args.output_results_path is invalid, call parser.error().

    return args


if __name__ == '__main__':
    args = parse_args()

    # Domains can be stored in variables and reused whenever the same domain
    # appears in multiple places in the search space.
    C = spaces.log2_uniform(-5, 15)
    gamma = spaces.log10_uniform(-3, 3)
    coef0 = spaces.uniform(-1000, 1000)

    # The search space in this case is hierarchical with mutually exclusive
    # subspaces for each SVM kernel. The 'exclusive' tag instructs SHADHO to
    # select one subspace at a time from among 'linear', 'rbf', 'sigmoid', and
    # 'poly', and only generate hyperparameters for that subspace.
    space = {
        'exclusive': True,
        'linear': {
            'kernel': 'linear',  # add the kernel name for convenience
            'C': C
        },
        'rbf': {
            'kernel': 'rbf',  # add the kernel name for convenience
            'C': C,
Example No. 4
On top of setting up the search, this tutorial demonstrates the use of the
"exclusive" flag to split non-overlapping search spaces into separate trees.
"""

from shadho import Shadho, spaces

if __name__ == '__main__':

    # Set up the search space. In this case, we are searching over SVM kernel
    # hyperparameterizations. Because some spaces are used with multiple
    # kernels, we can create the spaces outside of the dictionary and use them
    # multiple times. SHADHO makes sure no aliasing occurs.

    C = spaces.uniform(-1000, 2000)
    gamma = spaces.log10_uniform(-5, 8)
    coef0 = spaces.uniform(-1000, 2000)
    degree = [2, 3, 4, 5, 6, 7]

    # The joint hyperparameter domains for each kernel should be searched
    # independently of one another, so we use the "exclusive" flag to tell
    # SHADHO to sample each space independently.

    search_space = {
        'linear': {
            'C': C,
        },
        'rbf': {
            'C': C,
            'gamma': gamma,
        },