Example #1
import xgboost as xgb  # `utils` and `global_constraint` are assumed to be project-local modules


def main_process(dtrain, dtest, params, epsilon, stop_value=None):
    print("Starting hyperparameter tuning with start params:")
    print(utils.print_params(params))
    print("With epsilon (stop) value: {}".format(epsilon))
    # Enumerate candidate parameter sets one step away from the starting point.
    gradients = utils.get_gradient_list(params, global_constraint.STEP)
    steps = utils.get_possible_steps(params, gradients, [])
    min_mae = float("Inf")
    step_mae = float("Inf")
    iterations = 0
    best_params = params.copy()
    last_steps = []
    while True:
        last_steps = steps.copy()
        for step_params in steps:
            print(utils.print_params(step_params))
            cv_results = xgb.cv(step_params,
                                dtrain,
                                num_boost_round=10,
                                seed=42,
                                nfold=5,
                                metrics={'mae'},
                                early_stopping_rounds=10)

            mean_mae = cv_results['test-mae-mean'].min()
            boost_rounds = cv_results['test-mae-mean'].argmin()
            print("\tMAE {} for {} rounds".format(mean_mae, boost_rounds))
            iterations += 1
            if mean_mae < min_mae:
                min_mae = mean_mae
                best_params = step_params.copy()

        # Stop early once the target MAE has been reached.
        if stop_value is not None and min_mae < stop_value:
            break

        # Convergence check: the best MAE improved by less than epsilon.
        if abs(step_mae - min_mae) < epsilon:
            if iterations < 500:
                # Shrink the step size and search the neighbourhood again.
                utils.reduce_steps()
                step_mae = min_mae
                steps = utils.get_possible_steps(best_params, gradients,
                                                 last_steps)
            else:
                break
        else:
            step_mae = min_mae
            steps = utils.get_possible_steps(best_params, gradients,
                                             last_steps)

    print("Found best solution:")
    print(utils.print_params(best_params))
    print("MAE:")
    print(min_mae)

    return best_params, min_mae, iterations
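For context, the loop above is a simple hill climb over the hyperparameter space: `utils.get_possible_steps` acts as a neighbour generator that returns parameter sets one step away from the current best point, and `utils.reduce_steps` presumably shrinks those steps once the improvement falls below epsilon. A minimal sketch of such a neighbour generator, assuming numeric parameters and that `gradients` maps parameter names to step sizes (hypothetical names, not the project's actual implementation):

def get_possible_steps(params, gradients, visited):
    # Hypothetical sketch: `gradients` is assumed to map each parameter name
    # to a step size, and `visited` holds parameter dicts already evaluated.
    neighbours = []
    for name, step in gradients.items():
        for direction in (+1, -1):
            candidate = params.copy()
            candidate[name] = params[name] + direction * step
            if candidate not in visited:
                neighbours.append(candidate)
    return neighbours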
Example #2
    def step(self, game_state: np.array, last_step: int):
        # Play the most-filled column: the one where the next piece would
        # land in the highest row (row 0 is the top of the board).
        opts = get_possible_steps(game_state)
        min_val, min_step = 6, opts[0]  # 6 = number of board rows (sentinel)
        for i in opts:
            # Lowest empty cell of column i, i.e. where a piece would land.
            highest_index = np.where(game_state[:, i] == 0)[0][-1]
            if highest_index < min_val:
                min_val = highest_index
                min_step = i
        return min_step
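The game-playing examples here all call a project-level `get_possible_steps(game_state)` that is not shown on this page. Judging from how the board is indexed (0 marks an empty cell and row 0 is the top), it can be sketched as returning every column whose top cell is still free; a hypothetical reconstruction, not the project's actual code:

import numpy as np

def get_possible_steps(game_state: np.ndarray):
    # A column is playable while its top cell (row 0) is still empty.
    return [col for col in range(game_state.shape[1]) if game_state[0, col] == 0]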
Example #3
    def step(self, game_state: np.array, last_step: int):
        opts = get_possible_steps(game_state)
        for i in opts:
            board_local = game_state.copy()
            # Row where a piece dropped in column i would land.
            highest_index = np.where(board_local[:, i] == 0)[0][-1]
            # Take the move if it wins the game for us (player 1) ...
            board_local[highest_index, i] = 1
            if has_game_ended(board_local, i):
                return i
            # ... otherwise take it if the opponent (player 2) would win there.
            board_local[highest_index, i] = 2
            if has_game_ended(board_local, i):
                return i
        # No immediate win or block available: play a random valid column.
        [result] = np.random.choice(opts, 1, replace=False)
        return result
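Example #3 also relies on `has_game_ended(board, col)`, which is not shown either. Under the same board convention, a four-in-a-row check centred on the piece last dropped in `col` might look like this (a hedged sketch, not the project's implementation):

import numpy as np

def has_game_ended(board: np.ndarray, col: int) -> bool:
    # Assumes the most recent move is the topmost piece in `col`.
    filled_rows = np.where(board[:, col] != 0)[0]
    if len(filled_rows) == 0:
        return False
    row = filled_rows[0]
    player = board[row, col]
    # Count the run through (row, col) in each of the four line directions.
    for dr, dc in ((0, 1), (1, 0), (1, 1), (1, -1)):
        count = 1
        for sign in (1, -1):
            r, c = row + sign * dr, col + sign * dc
            while (0 <= r < board.shape[0] and 0 <= c < board.shape[1]
                   and board[r, c] == player):
                count += 1
                r += sign * dr
                c += sign * dc
        if count >= 4:
            return True
    return False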
Example #4
    def select(self, game_state: np.array, last_step: int):
        curr_state_node = self.state_search_tree[self.last_known_state_key]
        valid_steps = get_possible_steps(curr_state_node.state)

        # Win/visit statistics for every child, simulating unvisited children on demand.
        child_wins = np.zeros(len(valid_steps))
        child_visits = np.zeros(len(valid_steps))
        for i, step in enumerate(valid_steps):
            child_wins[i], child_visits[i] = \
                self.child_won_visited_simulate_if_needed(step)

        # Choose the child with the highest UCB score.
        step_idx = self.step_idx_to_take_UCB(child_wins, child_visits)

        return valid_steps[step_idx]
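`self.step_idx_to_take_UCB` is not shown on this page; a common choice for this kind of tree search is the UCB1 rule, which trades off the observed win rate against an exploration bonus for rarely visited children. A minimal sketch under that assumption (the project's method may differ):

import numpy as np

def step_idx_to_take_UCB(child_wins, child_visits, c=np.sqrt(2)):
    # UCB1 score: win_rate + c * sqrt(ln(total_visits) / visits).
    total_visits = child_visits.sum()
    scores = np.empty(len(child_visits))
    for i, (wins, visits) in enumerate(zip(child_wins, child_visits)):
        if visits == 0:
            scores[i] = np.inf  # expand unvisited children first
        else:
            scores[i] = wins / visits + c * np.sqrt(np.log(total_visits) / visits)
    return int(np.argmax(scores))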
Example #5
    def step(self, game_state: np.array, last_step: int):
        pos_steps = get_possible_steps(game_state)
        # Mask the 7 columns to valid moves, weight them by the per-column
        # weights in self.mass, and play the highest-weighted valid column.
        opts = [1 if i in pos_steps else 0 for i in range(7)]
        opts = opts * self.mass
        return np.argmax(opts)
Example #6
    def step(self, game_state: np.array, last_step: int):
        # Play a uniformly random valid column.
        opts = get_possible_steps(game_state)
        [result] = np.random.choice(opts, 1, replace=False)
        return result
Example #7
import xgboost as xgb  # `utils` and `global_constraint` are assumed to be project-local modules


def main_process_class(dtrain,
                       dtest,
                       params,
                       epsilon,
                       y_test,
                       stop_value=None):
    print("Starting hyperparameter tuning with start params:")
    print(utils.print_params(params))
    print("With epsilon (stop) value: {}".format(epsilon))
    # Enumerate candidate parameter sets one step away from the starting point.
    gradients = utils.get_gradient_list(params, global_constraint.STEP)
    steps = utils.get_possible_steps(params, gradients, [])
    maxacc = 0
    step_acc = 0
    iterations = 0
    best_params = params.copy()
    last_steps = []
    while True:
        last_steps = steps.copy()
        for step_params in steps:
            print(utils.print_params(step_params))
            # Train a booster and evaluate classification accuracy on the test set.
            model = xgb.train(step_params, dtrain, num_boost_round=10)
            preds = model.predict(dtest)
            preds = [1 if z > 0.5 else 0 for z in preds]

            # Count correctly classified test samples.
            res = [i for i, j in zip(preds, y_test) if i == j]
            print("Correct: {} / {} ({:.2f}%)".format(
                len(res), len(preds), 100 * len(res) / len(preds)))

            if len(res) > maxacc:
                maxacc = len(res)
                best_params = step_params.copy()

        iterations += 1
        # Convergence check: the best accuracy improved by less than epsilon.
        if abs(step_acc - maxacc) < epsilon:
            if iterations < 500:
                # Shrink the step size and search the neighbourhood again.
                utils.reduce_steps()
                step_acc = maxacc
                steps = utils.get_possible_steps(best_params, gradients,
                                                 last_steps)
            else:
                break
        else:
            step_acc = maxacc
            steps = utils.get_possible_steps(best_params, gradients,
                                             last_steps)

    print("Found best solution:")
    print(utils.print_params(best_params))
    print("MAE:")
    print(maxacc)

    return best_params, maxacc, iterations
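The manual count of matching predictions above could also be expressed with scikit-learn's accuracy_score, assuming `y_test` holds the true labels for `dtest`:

from sklearn.metrics import accuracy_score

accuracy = accuracy_score(y_test, preds)  # fraction of correctly classified samples
print("Accuracy: %.2f%%" % (accuracy * 100.0))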
Example #8
    def get_child_state_by_step(self, step: int):
        # Dump the state and the offending step before the assertion fails.
        if step not in get_possible_steps(self.state):
            print(self.state)
            print(step)
        assert step in get_possible_steps(self.state)
        return execute_step(self.state, step)
Example #9
    def get_child_state_by_idx(self, step_idx: int):
        # Resolve the index into an actual column before applying it.
        step = get_possible_steps(self.state)[step_idx]
        return execute_step(self.state, step)
Example #10
    def valid_steps(self):
        return get_possible_steps(self.state)