# Example #1
def p1b1_parameter_set():
    """Build the hyperparameter ParameterSet for the P1B1 benchmark.

    Relies on module-level candidate-value globals (``activation``,
    ``batch_size``, ``dense``, ``latent_dim``, ``model``, ``optimizer``,
    ``residual``, ``reduce_lr``, ``warmup_lr``) defined elsewhere in
    this module.

    Returns:
        prs.ParameterSet: the populated parameter space.
    """

    ps = prs.ParameterSet()

    # batch_size is a NumericListParameter to enforce integer validation
    ps["activation"] = prs.DiscreteParameter(activation)
    ps["batch_size"] = prs.NumericListParameter(batch_size)
    ps["dense"] = prs.DiscreteParameter(dense)
    ps["drop"] = prs.NumericParameter(0.0, 0.9)
    # epochs deliberately capped at 10-20 (was 100-200) to keep runs short
    ps["epochs"] = prs.IntegerParameter(10, 20)
    ps["latent_dim"] = prs.NumericListParameter(latent_dim)
    ps["learning_rate"] = prs.NumericParameter(0.00001, 0.1)
    ps["model"] = prs.DiscreteParameter(model)
    ps["optimizer"] = prs.DiscreteParameter(optimizer)
    ps["residual"] = prs.DiscreteParameter(residual)
    ps["reduce_lr"] = prs.DiscreteParameter(reduce_lr)
    ps["warmup_lr"] = prs.DiscreteParameter(warmup_lr)

    return ps
# Example #2
def p3b1_parameter_set():
    """Build the hyperparameter ParameterSet for the P3B1 benchmark.

    Relies on module-level candidate-value globals (``batch_size``,
    ``shared_nnet_spec``, ``ind_nnet_spec``) defined elsewhere in this
    module.

    Returns:
        prs.ParameterSet: the populated parameter space.
    """

    ps = prs.ParameterSet()

    # batch_size uses NumericListParameter (not DiscreteParameter) to
    # enforce integer validation
    ps.add(prs.NumericListParameter("batch_size", batch_size))
    ps.add(prs.IntegerParameter("epochs", 5, 50))
    ps.add(prs.NumericParameter("dropout", 0.0, 0.9))
    ps.add(prs.NumericParameter("learning_rate", 0.00001, 0.1))
    ps.add(prs.DiscreteParameter("shared_nnet_spec", shared_nnet_spec))
    ps.add(prs.DiscreteParameter("ind_nnet_spec", ind_nnet_spec))

    return ps
# Example #3
def p1b1_parameter_set():
    """Utility function to encapsulate ParameterSet definition"""

    # Collect (name, parameter) pairs first, then populate the set.
    # batch_size is NumericList to enforce integer validation; epochs is
    # limited to 10-20 (rather than 100-200) for demonstration purposes.
    entries = [
        ("activation", prs.DiscreteParameter(activation)),
        ("batch_size", prs.NumericListParameter(batch_size)),
        ("dense", prs.DiscreteParameter(dense)),
        ("drop", prs.NumericParameter(0.0, 0.9)),
        ("epochs", prs.IntegerParameter(10, 20)),
        ("latent_dim", prs.NumericListParameter(latent_dim)),
        ("learning_rate", prs.NumericParameter(0.00001, 0.1)),
        ("model", prs.DiscreteParameter(model)),
        ("optimizer", prs.DiscreteParameter(optimizer)),
        ("residual", prs.DiscreteParameter(residual)),
        ("reduce_lr", prs.DiscreteParameter(reduce_lr)),
        ("warmup_lr", prs.DiscreteParameter(warmup_lr)),
    ]

    param_space = prs.ParameterSet()
    for key, param in entries:
        param_space[key] = param

    return param_space
# Candidate fully-connected architectures: each inner list gives the
# widths of successive dense layers.
dense = [[500, 100, 50], [1000, 500, 100, 50], [2000, 1000, 500, 100, 50],
         [2000, 1000, 1000, 500, 100, 50],
         [2000, 1000, 1000, 1000, 500, 100, 50]]
#optimizer = ["adam", "sgd", "rmsprop", "adagrad", "adadelta","adamax","nadam"]
# Candidate convolutional architectures; presumably each inner list encodes
# per-layer sizes with a trailing 1 — TODO confirm against the model builder.
conv = [[50, 50, 50, 50, 50, 1], [25, 25, 25, 25, 25, 1],
        [64, 32, 16, 32, 64, 1], [100, 100, 100, 100, 100, 1],
        [32, 20, 16, 32, 10, 1]]

# Module-level parameter space built with the add(...) API.
# batch_size relies on a module-level candidate list defined elsewhere.
ps = prs.ParameterSet()

ps.add(prs.DiscreteParameter("batch_size", batch_size))
ps.add(prs.IntegerParameter("epochs", 5, 100))
#ps.add(prs.DiscreteParameter("activation", activation))
ps.add(prs.DiscreteParameter("dense", dense))
#ps.add(prs.DiscreteParameter("optimizer", optimizer))
ps.add(prs.NumericParameter("drop", 0.0, 0.9))
ps.add(prs.NumericParameter("learning_rate", 0.00001, 0.1))
ps.add(prs.DiscreteParameter("conv", conv))
# TODO: since dense and conv will be dummy-coded, ensure that all possible
# category values are present in the parameter set

# =============================================================================
# DATA
# =============================================================================

# TODO: relocate pdtypes to nt3_run_data
# coerce data into correct types in dataframe
# Dtype name constants used when building the pandas dtype mapping below.
float64 = 'float64'
int64 = 'int64'
pdtypes = {
    'batch_size': int64,
# Example #5
         [2000, 1000, 1000, 1000, 500, 100, 50]]
# Candidate optimizer names for the "optimizer" discrete parameter.
optimizer = [
    "adam", "sgd", "rmsprop", "adagrad", "adadelta", "adamax", "nadam"
]
# Candidate convolutional architectures; presumably each inner list encodes
# per-layer sizes with a trailing 1 — TODO confirm against the model builder.
conv = [[50, 50, 50, 50, 50, 1], [25, 25, 25, 25, 25, 1],
        [64, 32, 16, 32, 64, 1], [100, 100, 100, 100, 100, 1],
        [32, 20, 16, 32, 10, 1]]

# Module-level parameter space built with the item-assignment API.
# batch_size, activation and dense rely on candidate lists defined elsewhere.
ps = prs.ParameterSet()

ps["batch_size"] = prs.DiscreteParameter(batch_size)
ps["epochs"] = prs.IntegerParameter(5, 500)
ps["activation"] = prs.DiscreteParameter(activation)
ps["dense"] = prs.DiscreteParameter(dense)
ps["optimizer"] = prs.DiscreteParameter(optimizer)
ps["drop"] = prs.NumericParameter(0.0, 0.9)
ps["learning_rate"] = prs.NumericParameter(0.00001, 0.1)
ps["conv"] = prs.DiscreteParameter(conv)

# Echo the assembled parameter space for inspection when run as a script.
print(ps)

# =============================================================================
# Add run_id and subdirectory of /save for logged output.
# Ensure that all parameters are populated with default values
# Any last-minute or ad hoc changes can be added here
# =============================================================================
def param_update(params, default_params, run_id, subdirectory='exp'):
    run_params = default_params.copy()
    run_params.update(params)
    run_params['save'] = 'save/{}'.format(subdirectory)