Code Example #1
import pickle
from functools import partial
from pprint import pprint

from hyperband import Hyperband

# get_params_r and try_params_r come from the project's regression defs module,
# which is not shown in this listing


def regression_meta_model(data, output_file='results.pkl', max_iter=81, eta=3):
    if not output_file.endswith('.pkl'):
        output_file += '.pkl'
    print("Will save results to", output_file)

    # bind the dataset so Hyperband only needs to pass (n_iterations, params)
    try_params_data = partial(try_params_r, data=data)

    hb = Hyperband(get_params_r, try_params_data, max_iter=max_iter, eta=eta)
    results = hb.run(skip_last=1)

    print("{} total, best:\n".format(len(results)))

    for r in sorted(results, key=lambda x: x['loss'])[:5]:
        print("loss: {:.2%} | {} seconds | {:.1f} iterations | run {} ".format(
            r['loss'], r['seconds'], r['iterations'], r['counter']))
        pprint(r['params'])
        print()

    print("saving...")

    with open(output_file, 'wb') as f:
        pickle.dump(results, f)

    return results
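The partial call above fixes the data keyword so that Hyperband can call the evaluator with just (n_iterations, params). A tiny self-contained illustration of that pattern; the evaluator and its values are made up for demonstration:

from functools import partial

def evaluate(n_iterations, params, data=None):
    # stand-in evaluator: the bound `data` is available without being passed again
    assert data is not None
    return {'loss': 1.0 / (n_iterations * params['lr'])}

evaluate_on_data = partial(evaluate, data=[1, 2, 3])
print(evaluate_on_data(81, {'lr': 0.1}))  # -> {'loss': 0.123456...}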
Code Example #2
File: main.py  Project: sho-ss/hyperband_sandbox
import argparse

import pandas as pd

# Hyperband, get_param_with_bench and plot_util are project-local imports
# not shown in this listing


def main():
    parser = argparse.ArgumentParser(description='Hyperband main script')
    parser.add_argument('bench',
                        type=str,
                        choices=['MLPWithMNIST'],
                        help='the benchmark function you want to run')
    parser.add_argument('--max_iter',
                        type=int,
                        default=27,
                        help='maximum amount of resource that can be '
                             'allocated to a single configuration')
    parser.add_argument('--eta',
                        type=int,
                        default=3,
                        help='proportion of the configurations discarded '
                             'in each round of SH')
    parser.add_argument('--patience',
                        type=int,
                        default=5,
                        help='threshold for original early-stopping')
    parser.add_argument('--gcp', action='store_true')

    args = parser.parse_args()
    params = get_param_with_bench(args.bench)
    params['max_iter'] = args.max_iter
    params['eta'] = args.eta
    params['patience'] = args.patience
    params['homedir'] = '/hyperband_sandbox/' if args.gcp else './'

    # run optimization
    hb = Hyperband(**params)
    best = hb.run()
    print("best:{}".format(best))

    separate_history = hb.separate_history
    print("separate_history:{}".format(separate_history))
    for i, (k, v) in enumerate(separate_history.items()):
        df = pd.DataFrame(v)
        df.to_csv("./log_{}.csv".format(i))

    plot_util.plot_separately(separate_history, homedir=params['homedir'])
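Assuming the script above is saved as main.py, an invocation would look roughly like this (illustrative command line, not taken from the project's documentation):

    python main.py MLPWithMNIST --max_iter 27 --eta 3 --patience 5

Adding --gcp switches the output directory from ./ to /hyperband_sandbox/.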
Code Example #3
    'do1' : hp.uniform('do1', 0.2, 0.3),
    'do2' : hp.uniform('do2', 0.2, 0.3),
    'do3' : hp.uniform('do3', 0.4, 0.5),
    'extra_first_layers' : hp.choice('extra_first_layers', [1, 2, 3]),
    'extra_second_layers' : hp.choice('extra_second_layers', [1, 2]),
    }

dummy_space = {
    'x' : hp.uniform('x', 0.2, 0.9),
    }

def dummy_get_params():
    return sample(dummy_space)

def dummy_try_params(n, p):
    acc = p['x'] * n
    return {'acc' : acc}

def get_params():
    params = sample(space)
    return params

def try_params(n, p):
    # NOTE: Hyperband expects try_params to return a dict with at least a 'loss'
    # key; see the sketch after this example for one way to wrap km.train_model.
    km.train_model(n, p)
    
hb = Hyperband(dummy_get_params, dummy_try_params)
results = hb.run()
hb.print_best_results(5)
 
print(hb.get_best_config())
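As noted in the comment above, Hyperband ranks configurations by the 'loss' key of the dict that try_params returns (which is also how the other examples in this listing sort their results). A minimal sketch of a complete evaluator, under the assumption that km.train_model reports a scalar validation loss; that return value is an assumption, not something documented here:

def try_params(n, p):
    # assumption: km.train_model returns a scalar validation loss for the
    # configuration p after a training budget of n iterations
    val_loss = km.train_model(n, p)
    return {'loss': val_loss}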
Code Example #4
#!/usr/bin/env python

"bare-bones demonstration of using hyperband to tune sklearn GBT"

from hyperband import Hyperband
from defs.gb import get_params, try_params

hb = Hyperband(get_params, try_params)

# no actual tuning, doesn't call try_params()
# results = hb.run( dry_run = True )

results = hb.run(skip_last=1)  # shorter run
results = hb.run()  # full run; note this overwrites the results of the shorter run
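The demo pulls get_params and try_params from defs.gb, which is not shown in this listing. As a rough illustration of the interface those two functions provide, here is a self-contained sketch; the toy dataset, the search space, and the n_estimators scaling are assumptions, not the contents of defs/gb.py:

import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.metrics import log_loss
from sklearn.model_selection import train_test_split

# toy data standing in for the project's real dataset
x, y = make_classification(n_samples=2000, n_features=20, random_state=0)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)

def get_params():
    # sample one random configuration
    return {
        'learning_rate': 10 ** np.random.uniform(-3, 0),
        'max_depth': int(np.random.randint(2, 6)),
        'subsample': np.random.uniform(0.5, 1.0),
    }

def try_params(n_iterations, params):
    # spend a budget proportional to n_iterations and report the test log-loss
    clf = GradientBoostingClassifier(n_estimators=int(round(n_iterations)) * 10, **params)
    clf.fit(x_train, y_train)
    return {'loss': log_loss(y_test, clf.predict_proba(x_test))}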
Code Example #5
import sys
import pickle
from pprint import pprint

from hyperband import Hyperband
from defs_regression.meta import get_params, try_params

try:
    output_file = sys.argv[1]
    if not output_file.endswith('.p'):
        output_file += '.p'
except IndexError:
    output_file = 'results.p'

print("Will save results to " + output_file)

#

hb = Hyperband(get_params, try_params)
results = hb.run(skip_last=1)

print("%d total, best:\n" % (len(results)))
for r in sorted(results, key=lambda x: x['loss'])[:5]:
    print("loss: %.2f | %f seconds | %.1f iterations | run %d " %
          (r['loss'], r['seconds'], r['iterations'], r['counter']))
    pprint(r['params'])

print("saving...")

with open(output_file, 'wb') as f:
    pickle.dump(results, f)
Code Example #6
import os
import sys
import pickle
from pprint import pprint

# Hyperband, ml, Load_Data, get_params and try_params are project-local
# modules/functions not shown in this listing


def main(directory):

    dir_list = os.listdir(directory)
    for e in dir_list:
        # os.listdir returns bare file names; os.path.join is robust to a
        # missing trailing separator on `directory`
        file_name = os.path.join(directory, e)

        metalearning = ml.main(os.path.basename(e))

        load = Load_Data(file_name)

        train, valid, test = load.split_train_test_valid()

        for i in range(1):

            try:
                output_file1 = sys.argv[1]
                output_file2 = sys.argv[1]
                if not output_file1.endswith('.pkl'):
                    output_file1 += '.pkl'
                    output_file2 += '.pkl'
            except IndexError:
                output_file1 = 'results_ab_' + os.path.basename(e) + '_' + str(
                    i) + '.pkl'
                output_file2 = 'results_ab_test_' + os.path.basename(
                    e) + '_' + str(i) + '.pkl'

                print("Will save results to", output_file1, output_file2)

            # data = load(file_name)

            hb = Hyperband(get_params, try_params, train, valid, test,
                           metalearning)
            results = hb.run(skip_last=1)
            # print(results)
            test_results = hb.tests(results)

            print("{} total, best in validation:\n".format(len(results)))

            for r in sorted(results, key=lambda x: x['loss']):
                print("loss: {:.2} | {} seconds | {:.1f} instances | run {} ".format(
                    r['loss'], r['seconds'], r['instances'], r['counter']))
                pprint(r['params'])
                print()

            print("test results")
            for t in test_results:
                print("loss: {:.2%} | auc: {:.2%} | {} seconds | {} run ".format(
                    t['loss'], t['auc'], t['seconds'], t['counter']))
                pprint(t['params'])
                print()

            print("results: ", results)
            print("test results:    ", test_results)
            print("saving...")

            with open(output_file1, 'wb') as f:
                pickle.dump(results, f)

            with open(output_file2, 'wb') as f:
                pickle.dump(test_results, f)

    return 'finished'
Code Example #7
def main():
    data = load_mnist()
    hb = Hyperband(data, get_params, try_params)
    results = hb.run()
    print(results)
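load_mnist is not defined in the snippet; one self-contained way to obtain a comparable dataset with scikit-learn is sketched below. Whether the project's loader returns the same (train, test) structure is an assumption:

from sklearn.datasets import fetch_openml
from sklearn.model_selection import train_test_split

def load_mnist(test_size=0.2, random_state=0):
    # download the 70,000-sample MNIST digits as flat 784-dimensional vectors
    x, y = fetch_openml('mnist_784', version=1, return_X_y=True, as_frame=False)
    x = x / 255.0  # scale pixel values to [0, 1]
    return train_test_split(x, y, test_size=test_size, random_state=random_state)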
Code Example #8
import argparse
import json
import os
import time
from pprint import pprint

# `methods` (providing methods.xgb) and `Hyperband` are project-local imports
# not shown in this listing

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('data_file', help = 'Path to training dataset')
    parser.add_argument('mode', help = 'Background type')
    parser.add_argument('channel', help = 'Decay channel')
    args = parser.parse_args()
    
    print('Loading dataset from: %s with test size 0.05' %(args.data_file))
    dataDMatrix = methods.xgb.load_data(args.data_file, args.mode, args.channel)

    start = time.time()
    print('Running HyperBand')
    hb = Hyperband(dataDMatrix, methods.xgb.get_hyp_config, methods.xgb.run_hyp_config)
    results = hb.run(skip_random_search = True)

    delta_t = time.time() - start
    output = os.path.join('models', args.mode + args.channel + '.json')
    
    print("{} Total, Leaderboard:\n".format(len(results)))

    # note: an ascending sort on 'auc' puts the lowest-AUC runs first;
    # pass reverse=True to rank the best runs at the top
    for r in sorted(results, key=lambda x: x['auc'])[:10]:
        print("auc: {:.2%} | {} s | {:.1f} iterations | run {} ".format(
            r['auc'], r['seconds'], r['iterations'], r['counter']))
        pprint(r['params'])
        print()

    print('Hyperparameter search complete. Results saved to %s\n' %(output))
    with open(output, 'w') as f:
        json.dump(results, f)
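Unlike the other examples, which pickle their results, this script writes JSON, so every value in results (including the sampled hyperparameters) has to be JSON-serializable. If the search space yields numpy scalars (an assumption about this project), a fallback converter can be passed to json.dump:

import json
import numpy as np

def to_builtin(obj):
    # convert numpy scalar types, which json cannot encode, to plain Python values
    if isinstance(obj, np.generic):
        return obj.item()
    raise TypeError('Object of type %s is not JSON serializable' % type(obj).__name__)

# json.dump(results, f, default=to_builtin)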
Code Example #9
        })
        # note: despite the name, this is a sum of squared errors (no mean or sqrt applied)
        rmse = np.sum(np.square(out_v_test[-1][0]/c.lambda_max - x_values_sm_b))

        # mse = MSE(np.squeeze(x_values_sm_b), np.squeeze(out_v_test[-1][0]/c.lambda_max))
        # rmse = np.sqrt(mse)
        
        mae = MAE(np.squeeze(x_values_sm_b), np.squeeze(out_v_test[-1][0]/c.lambda_max))
    sess.close()
    r = { 'loss': rmse, 'rmse': rmse, 'mae': mae}
    if return_data:
        r["data"] = (out_v, out_v_test)
    print(rmse)
    return r

from hyperband import Hyperband

hb = Hyperband(get_params, try_params)
results = hb.run(skip_last = False)


# p = {'weight_init': 2.246314076891554, 'tau_b': 24.007448833081085, 'tau_c': 1.3402075787402872, 'tau_a': 10.881136694144896, 'lrate': 0.5851586265640217, 'theta': 24.0809893295545, 'tau_syn': 0.19291385527484867, 'per_epoch_shift': 22.910523230821795}
# r = try_params(10, p)

# p = {'weight_init': 0.09642636619530962, 'tau_b': 1.9493176363660059, 'tau_c': 1.7340754569936037, 'tau_a': 0.462867947572219, 'lrate': 0.6594933794300799, 'theta': 14.677925945506452, 'tau_syn': 20.646859326121326, 'per_epoch_shift': 22.329439955821854}
# r = try_params(10, p)

top_five = sorted(results, key=lambda k: k["loss"])[0:5]

r = try_params(81, top_five[0]["params"], return_data=True)

out_v, out_v_test = r["data"]