Example 1
def _sample_min_max_well(rng):
    """Sample a min max well problem."""
    is_noise = utils.sample_bool(rng, 0.5)
    return {
        "dim":
        utils.sample_log_int(rng, 10, 1000),
        "noise_stdev":
        utils.sample_log_float(rng, 0.01, 10.0) if is_noise else 0.0,
    }
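All of these samplers draw their hyperparameters through a small utils module that is not shown in this excerpt. As a rough sketch of what those helpers are assumed to do (the bodies below are illustrative guesses based on the call sites above, not the actual implementation):

import numpy as np

def sample_bool(rng, p):
    # Assumed: return True with probability p.
    return bool(rng.uniform(0.0, 1.0) < p)

def sample_log_float(rng, low, high):
    # Assumed: draw a float log-uniformly from [low, high].
    return float(np.exp(rng.uniform(np.log(low), np.log(high))))

def sample_log_int(rng, low, high):
    # Assumed: draw an integer log-uniformly from [low, high].
    return int(round(sample_log_float(rng, low, high)))

Under that assumption, _sample_min_max_well(np.random.RandomState(0)) returns a plain dict with a log-uniform "dim" in [10, 1000] and, half the time, a nonzero "noise_stdev".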
Example 2
def _sample_optimization_test_problems(rng):
    """Sample an optimization test function problem."""
    is_noise = utils.sample_bool(rng, 0.5)
    return {
        "problem":
        rng.choice(sorted(_opt_test_problems.keys())),
        "noise_stdev":
        utils.sample_log_float(rng, 0.01, 10.0) if is_noise else 0.0,
    }
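_opt_test_problems is referenced here but defined elsewhere in the module; it is presumably a dict mapping problem names to optimization test-function constructors. A purely hypothetical stand-in, just to show the shape the sampler expects:

# Hypothetical registry shape; the real entries live elsewhere in the module.
_opt_test_problems = {
    "ackley": lambda: None,      # placeholder constructor
    "branin": lambda: None,
    "rosenbrock": lambda: None,
}

Sorting the keys before rng.choice keeps the draw reproducible for a given seed, independent of dict insertion or iteration order.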
Example 3
def _sample_quadratic_problem(rng):
    """Sample a quadratic problem."""
    is_noise = utils.sample_bool(rng, 0.5)
    return {
        "dim":
        utils.sample_log_int(rng, 10, 1000),
        "noise_stdev":
        utils.sample_log_float(rng, 0.01, 10.0) if is_noise else 0.0,
    }
Example 4
def _sample_sparse_softmax_regression(rng):
    """Sample a sparse softmax regression problem."""
    is_noise = utils.sample_bool(rng, 0.5)
    return {
        "n_features": utils.sample_log_int(rng, 2, 100),
        "n_classes": 2,
        "noise_stdev":
        utils.sample_log_float(rng, 0.01, 10.0) if is_noise else 0.0,
        "bs": utils.sample_log_int(rng, 1, 50),
        "n_samples": utils.sample_log_int(rng, 1, 30),
    }
Example 5
def _sample_bowl_problems(rng):
    """Sample a bowl problem."""
    is_noise = utils.sample_bool(rng, 0.5)
    return {
        "cond":
        utils.sample_log_float(rng, 0.01, 100),
        "angle":
        rng.choice([0, 0, np.pi / 4., np.pi / 3]),
        "noise_stdev":
        utils.sample_log_float(rng, 0.01, 10.0) if is_noise else 0.0,
    }
Example 6
def sample_char_rnn_language_model_family_cfg(seed):
    """Samples a character NN language modeling task."""
    rng = np.random.RandomState(seed)
    cfg = {}
    cfg["embed_dim"] = utils.sample_log_int(rng, 8, 128)
    cfg["w_init"] = utils.sample_initializer(rng)

    full_vocab = utils.sample_bool(rng, 0.8)
    if full_vocab:
        cfg["vocab_size"] = 256
    else:
        # Only operate on a subset of the full vocabulary.
        cfg["vocab_size"] = utils.sample_log_int(rng, 100, 256)
    cfg["core"] = utils.sample_rnn_core(rng)
    cfg["trainable_init"] = bool(rng.choice([True, False]))

    cfg["dataset"] = utils.sample_char_lm_dataset(rng)
    return cfg
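A hedged usage sketch: because every random draw flows through the RandomState built from seed, calling the sampler twice with the same seed should return identical configs (the comment below lists the expected keys, not real outputs):

cfg = sample_char_rnn_language_model_family_cfg(seed=42)
# cfg is a plain dict with keys: embed_dim, w_init, vocab_size, core,
# trainable_init, dataset.
assert cfg == sample_char_rnn_language_model_family_cfg(seed=42)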
Example 7
def _sample_fully_connected(rng):
    """Sample a fully connected problem."""
    n_layer = rng.choice([2, 3, 4, 5])
    fixed = utils.sample_bool(rng, 0.5)
    cfg = {
        "n_features": utils.sample_log_int(rng, 1, 16),
        "n_classes": 2,
        "activation": utils.sample_activation(rng),
        "bs": utils.sample_log_int(rng, 1, 200),
        "n_samples": utils.sample_log_int(rng, 1, 30),
    }
    if fixed:
        cfg["hidden_sizes"] = [utils.sample_log_int(rng, 4, 32)] * n_layer
    else:
        cfg["hidden_sizes"] = [
            utils.sample_log_int(rng, 4, 32) for _ in range(n_layer)
        ]

    return cfg
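To make the fixed/varied branch concrete, a short sketch of the two shapes hidden_sizes can take (the concrete widths are made up for illustration):

rng = np.random.RandomState(0)
cfg = _sample_fully_connected(rng)
# fixed branch: one width drawn once and repeated, e.g. [16, 16, 16]
# varied branch: an independent draw per layer, e.g. [4, 27, 9]
assert 2 <= len(cfg["hidden_sizes"]) <= 5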
Example 8
def _sample_sparse_problem(rng):
    """Sample a sparse problem.

  This problem modifies a sampled base problem by setting some gradients to

  zero.

  Args:
    rng: Random state

  Returns:
    The sampled config.
  """
    is_noise = utils.sample_bool(rng, 0.5)
    base_config = rng.choice(_to_modify)
    return {
        "base": (base_config, _problem_sample_get[base_config][0](rng)),
        "zero_probability":
        rng.uniform(0.9, 0.99),
        "noise_stdev":
        utils.sample_log_float(rng, 0.01, 10.0) if is_noise else 0.0,
    }
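This sampler relies on two module-level objects not shown here: _to_modify, a sequence of base-problem names that may be sparsified, and _problem_sample_get, a registry whose values are pairs in which element [0] is the config sampler for that name. A rough illustrative sketch of that wiring, reusing the samplers above (the key names and the None placeholders are assumptions):

# Illustrative only; the real registry and _to_modify are built elsewhere.
_problem_sample_get = {
    "quadratic": (_sample_quadratic_problem, None),
    "bowl": (_sample_bowl_problems, None),
    "min_max_well": (_sample_min_max_well, None),
}
_to_modify = sorted(_problem_sample_get.keys())

With this wiring, _sample_sparse_problem picks one base name, samples that base's config via _problem_sample_get[name][0](rng), and stores the (name, base_config) pair under "base", alongside a zero_probability drawn uniformly from [0.9, 0.99].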