Example #1
def optimize_sp_for_fixed_beta(params: pr.Parameters, groups, real_beta):
    # print("!!! Beginning experiment !!!")
    (x, y) = tst.generate_training_data(real_beta, params)

    def f(log_sparsity_param):
        params.sparsity_param = math.pow(2, log_sparsity_param)
        (learned_beta, runtime, cycles,
         convergence_type) = pr.learn(x, y, groups, params)
        avg_error = tst.test(learned_beta, real_beta, params)
        return avg_error

    # Optimize sparsity parameter several times (stochastic optimization)
    opt_log_params = []
    for i in range(5):
        best_log_param = minimize(f, 0.0, 12.0, 0.3)
        opt_log_params.append(best_log_param)
        print("optimizing sparsity param...", math.pow(2, best_log_param))
    # print("sparsity params for fixed beta:", opt_log_params)
    opt_sparsity_param = math.pow(2, np.mean(opt_log_params))

    # print("TRAINING SPARSITY:", params.training_feature_sparsity)
    print("Average optimum sparsity parameter: ", opt_sparsity_param, "= 2^",
          np.mean(opt_log_params))

    # Re-run experiment with optimal sparsity parameter
    params.sparsity_param = opt_sparsity_param
    (learned_beta, runtime, cycles,
     convergence_type) = pr.learn(x, y, groups, params)
    avg_error = tst.test(learned_beta, real_beta, params)
    print("Performance on beta with optimum sparsity parameter",
          params.sparsity_param, "runtime:", int(runtime), "cycles:", cycles,
          "avg error:", round(avg_error, 3), "convergence:", convergence_type)

    return runtime, cycles, avg_error, convergence_type, opt_sparsity_param
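Note: this example calls a minimize(f, lower, upper, step) helper that is not included in the listing. A minimal sketch of a compatible helper is given below, assuming it performs a jittered grid search over the log2 sparsity range and returns the best argument; the names and behavior here are assumptions, not the original implementation.

import random

def minimize(f, lower, upper, step):
    # Hypothetical helper: evaluate f on a jittered grid over [lower, upper]
    # and return the argument with the smallest value. The random jitter makes
    # repeated calls stochastic, which is why the caller averages several runs.
    best_x, best_val = lower, float("inf")
    x = lower
    while x <= upper:
        candidate = min(upper, max(lower, x + random.uniform(-step / 2.0, step / 2.0)))
        val = f(candidate)
        if val < best_val:
            best_x, best_val = candidate, val
        x += step
    return best_x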
Example #2
def run_experiment(params, generate_beta):
    groups = tst.generate_groups(params)
    real_beta = generate_beta(params, groups)
    (x, y) = tst.generate_training_data(real_beta, params)

    (learned_beta, runtime, cycles, convergence_type) = pr.learn(x, y, groups, params)

    avg_error = tst.test(learned_beta, real_beta, params)
    return runtime, cycles, avg_error, convergence_type
Example #3
def experiment_with_fixed_params(params: pr.Parameters, gen_beta):
    # print("!!! Beginning experiment !!!")
    groups = tst.generate_groups(params)
    real_beta = gen_beta(params, groups)
    (x, y) = tst.generate_training_data(real_beta, params)

    # Run the experiment with the given (fixed) parameters
    (learned_beta, runtime, cycles,
     convergence_type) = pr.learn(x, y, groups, params)
    avg_error = tst.test(learned_beta, real_beta, params)
    print("Number of Groups:", params.num_groups,
          "runtime:", int(runtime), "cycles:", cycles, "avg error:",
          round(avg_error, 3), "convergence:", convergence_type)

    return runtime, cycles, avg_error, convergence_type
Example #4
def run_experiment(params: pr.Parameters, gen_beta):

    # Generate the groups, the true beta, and the training data
    groups = tst.generate_groups(params)
    real_beta = gen_beta(params, groups)
    (x, y) = tst.generate_training_data(real_beta, params)

    # Run the learning algorithm
    (learned_beta, runtime, cycles,
     convergence_type) = pr.learn(x, y, groups, params)

    # Test accuracy of the learned beta against the true beta
    avg_error = tst.test(learned_beta, real_beta, params)
    print("runtime:", int(runtime), "cycles:", cycles, "avg error:",
          round(avg_error, 3), "convergence:", convergence_type)

    return runtime, cycles, avg_error, convergence_type
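For context, a minimal usage sketch of this driver follows; the Parameters construction and the tst.generate_sparse_beta generator name are assumptions, not taken from the original code.

import pr
import tst

params = pr.Parameters()  # assumed to be default-constructible with the fields used above
# tst.generate_sparse_beta is a hypothetical beta generator passed as gen_beta
runtime, cycles, avg_error, convergence_type = run_experiment(params, tst.generate_sparse_beta)
print("avg error:", round(avg_error, 3))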
Example #5
def f(log_sparsity_param):
    params.sparsity_param = math.pow(2, log_sparsity_param)
    (learned_beta, runtime, cycles,
     convergence_type) = pr.learn(x, y, groups, params)
    avg_error = tst.test(learned_beta, real_beta, params)
    return avg_error
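Example #5 is the inner objective f from Example #1 extracted on its own: it maps a log2 sparsity parameter to the average test error. If a library optimizer were preferred over the custom minimize helper, a bounded one-dimensional search such as SciPy's minimize_scalar could be used instead; the sketch below assumes f is defined in a scope where x, y, groups, params, and real_beta exist.

import math
from scipy.optimize import minimize_scalar

# Search the same log2 range used in Example #1 (0 to 12) for the sparsity
# parameter that minimizes the average test error returned by f.
result = minimize_scalar(f, bounds=(0.0, 12.0), method="bounded")
best_sparsity_param = math.pow(2, result.x)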