Example #1
def main():
    tuner = Tuner(param_space, objective, {'num_iteration': 30})
    results = tuner.maximize()

    print('best parameters:', results['best_params'])
    print('best accuracy:', results['best_objective'])

    assert results['best_objective'] > 0.93
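
Example #1 references param_space, objective, and Tuner without showing how they are defined. A minimal sketch of plausible definitions, assuming mango's Tuner API with a serial objective and a placeholder scikit-learn task (the search space and dataset below are illustrative, not the original example's):

from mango import Tuner, scheduler
from scipy.stats import uniform
from sklearn import datasets, svm
from sklearn.model_selection import cross_val_score

# placeholder search space; the original example's space is not shown
param_space = {
    'kernel': ['rbf', 'linear'],
    'C': uniform(0.1, 10),  # scipy distribution: samples from 0.1 to 10.1
}

# serial objective: mango calls it with one configuration at a time
@scheduler.serial
def objective(**params):
    X, y = datasets.load_iris(return_X_y=True)  # placeholder dataset
    clf = svm.SVC(**params)
    return cross_val_score(clf, X, y, cv=5).mean()
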
Example #2
def main():

    if sys.argv[1] != "nextpnr-ice40":
        print("ERROR: only nextpnr-ice40 is supported")
        sys.exit(1)

    space = {
        'alpha': [x * .025 for x in range(1, 21)],  # 0.025 -> 0.500 (0.025 step)
        'beta': [x * .025 for x in range(20, 41)],  # 0.500 -> 1.000 (0.025 step)
        'critexp': range(1, 11, 1),
        'tweight': range(1, 35, 5),
    }

    cfg = dict(num_iteration=20)
    tuner = Tuner(space, evalpnr, cfg)
    results = tuner.maximize()

    print("Best clock: %s Mhz" % results["best_objective"])
    print("Best parameters: [%s]" % results["best_params"])
Example #3
    # x_test = std_scaler.transform(x_test)

    del x_train, x_dev  # freeing up space

    # tuner = svm_fits.train_mango_skcv(X=x_combined, Y=y_combined, n_splits=10)

    param_space = {
        # 'kernel': ['rbf', 'linear'],
        # 'gamma': uniform(0.1, 4),  # 0.1 to 4.1
        'C': [1e-6, 1e-5, 1e-4, 1e-3, 0.01, 0.1]  # loguniform(-6, 6)  # 10^-7 to 10
    }


    # @scheduler.serial
    def objectiveSVM(args_list):
        # mango batch objective: args_list is a list of parameter dicts; return one score per dict
        results = []
        for hyper_par in args_list:
            svc = svm.LinearSVC(**hyper_par, max_iter=100000,
                                class_weight='balanced')
            # svc = thunder(**hyper_par, max_iter=100000,
            #               class_weight='balanced')
            result = cross_val_score(svc, x_combined, y_combined,
                                     scoring=my_scorer, n_jobs=-1, cv=10).mean()
            results.append(result)
        return results

    tuner = Tuner(param_dict=param_space, objective=objectiveSVM)
    results = tuner.maximize()

    print('best hyper parameters:', results['best_params'])
    print('best objective:', results['best_objective'])
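
This excerpt depends on x_combined, y_combined, and my_scorer defined earlier in the file, plus the usual imports. A stand-in setup for running the snippet on its own, where the dataset and scorer are assumptions rather than the original pipeline:

from mango import Tuner
from sklearn import datasets, svm
from sklearn.metrics import make_scorer, f1_score
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import StandardScaler

# placeholder data; the original combines scaled train/dev splits
x_combined, y_combined = datasets.load_breast_cancer(return_X_y=True)
x_combined = StandardScaler().fit_transform(x_combined)

# placeholder scorer; the original my_scorer is not shown
my_scorer = make_scorer(f1_score)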