Example #1
def handle_comp(self):
    """
    Make the computer's move using the game tree.
    :return: None
    """
    tree = BTree(self.board)
    tree.build()
    move = tree.f_move('x')
    # the chosen cell must still be empty; otherwise the tree is inconsistent
    if self.board.positions[move[0], move[1]] is not None:
        print("Computer made a mistake")
        raise UserError
    self.board.add_pos(move, 'x')
    print("Computer moved {}".format(move))
Example #2
class Board:
    def __init__(self, n):
        self.width = n
        # pre-build the full game tree and start from its root
        self.tree = BTree(n)
        self.tree.build()
        self.current_step = self.tree._root
        self.first_is_x = True

    def make_step(self, row, col):
        """Advance to the child node that corresponds to playing (row, col)."""
        if self.current_step.field.board[row][col] is not None:
            raise Exception("Wrong move")
        for step in self.current_step.children:
            if step.step_row == row and step.step_col == col:
                self.current_step = step
                break

    def get_best_step(self):
        """Return (row, col) of the highest-rated child of the current node."""
        best_step = None
        for step in self.current_step.children:
            if best_step is None or best_step.rating < step.rating:
                best_step = step
        return best_step.step_row, best_step.step_col

    def __str__(self):
        # each cell stores None (empty) or a flag telling whether the first
        # player owns it; translate that into 'X'/'O' for display
        s = ""
        for row in range(self.width):
            for col in range(self.width):
                is_first = self.current_step.field.board[row][col]
                if is_first is None:
                    s += "  | "
                elif self.first_is_x == is_first:
                    s += "X | "
                else:
                    s += "O | "
            s = s[:-3] + "\n"
            s += ("-" * self.width * 3) + "\n"
        # drop the trailing separator line
        return s[:-((self.width * 3) + 1)]
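A short usage sketch for Board, assuming BTree(n) builds the complete game tree for an n×n grid as the constructor expects; the 3×3 size and the specific moves are illustrative only.

# Illustrative session: the human moves first, the engine answers with the
# highest-rated child of the current tree node.
board = Board(3)
board.make_step(0, 0)              # human takes the top-left cell
row, col = board.get_best_step()   # engine picks its best-rated reply
board.make_step(row, col)
print(board)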
import gc
import json
import os
import time

import pandas as pd

# BTree, hybrid_training, set_data_type, Distribution, ParameterPool,
# pathString and TOTAL_NUMBER are defined elsewhere in this project.


def train_index(threshold, use_threshold, distribution, path):
    # data = pd.read_csv("data/random_t.csv", header=None)
    # data = pd.read_csv("data/exponential_t.csv", header=None)
    data = pd.read_csv(path, header=None)
    train_set_x = []
    train_set_y = []
    test_set_x = []
    test_set_y = []

    set_data_type(distribution)
    # read parameter
    if distribution == Distribution.RANDOM:
        parameter = ParameterPool.RANDOM.value
    elif distribution == Distribution.LOGNORMAL:
        parameter = ParameterPool.LOGNORMAL.value
    elif distribution == Distribution.EXPONENTIAL:
        parameter = ParameterPool.EXPONENTIAL.value
    elif distribution == Distribution.NORMAL:
        parameter = ParameterPool.NORMAL.value
    else:
        return
    stage_set = parameter.stage_set
    # set number of models for second stage (1 model deal with 10000 records)
    #stage_set[1] = int(round(data.shape[0] / 10000))
    core_set = parameter.core_set
    train_step_set = parameter.train_step_set
    batch_size_set = parameter.batch_size_set
    learning_rate_set = parameter.learning_rate_set
    keep_ratio_set = parameter.keep_ratio_set

    global TOTAL_NUMBER
    TOTAL_NUMBER = data.shape[0]
    for i in range(data.shape[0]):
        train_set_x.append(data.iloc[i, 0])
        train_set_y.append(data.iloc[i, 1])

    # evaluate on the training keys themselves
    test_set_x = train_set_x[:]
    test_set_y = train_set_y[:]
    # data = pd.read_csv("data/random_t.csv", header=None)
    # data = pd.read_csv("data/exponential_t.csv", header=None)
    # for i in range(data.shape[0]):
    #     test_set_x.append(data.ix[i, 0])
    #     test_set_y.append(data.ix[i, 1])

    print("*************start Learned NN************")
    print("Start Train")
    start_time = time.time()
    # train index
    trained_index = hybrid_training(threshold, use_threshold, stage_set, core_set, train_step_set, batch_size_set, learning_rate_set,
                                    keep_ratio_set, train_set_x, train_set_y, [], [])
    end_time = time.time()
    learn_time = end_time - start_time
    print("Build Learned NN time ", learn_time)
    print("Calculate Error")
    err = 0
    start_time = time.time()
    # calculate error
    for ind in range(len(test_set_x)):
        # pick model in next stage
        pre1 = trained_index[0][0].predict(test_set_x[ind])
        if pre1 > stage_set[1] - 1:
            pre1 = stage_set[1] - 1
        # predict position
        pre2 = trained_index[1][pre1].predict(test_set_x[ind])
        err += abs(pre2 - test_set_y[ind])
    end_time = time.time()
    search_time = (end_time - start_time) / len(test_set_x)
    print("Search time %f " % search_time)
    mean_error = err * 1.0 / len(test_set_x)
    print("mean error = ", mean_error)
    print("*************end Learned NN************\n\n")
    # write parameter into files
    result_stage1 = {0: {"weights": trained_index[0][0].weights, "bias": trained_index[0][0].bias}}
    result_stage2 = {}
    for ind in range(len(trained_index[1])):
        if trained_index[1][ind] is None:
            continue
        if isinstance(trained_index[1][ind], BTree):
            # this stage-2 slot fell back to a BTree: serialise its nodes
            tmp_result = []
            for node_ind, node in trained_index[1][ind].nodes.items():
                items = []
                for ni in node.items:
                    if ni is None:
                        continue
                    items.append({"key": ni.k, "value": ni.v})
                tmp = {"index": node.index, "isLeaf": node.isLeaf, "children": node.children, "items": items,
                       "numberOfkeys": node.numberOfKeys}
                tmp_result.append(tmp)
            result_stage2[ind] = tmp_result
        else:
            result_stage2[ind] = {"weights": trained_index[1][ind].weights,
                                  "bias": trained_index[1][ind].bias}
    result = [{"stage": 1, "parameters": result_stage1}, {"stage": 2, "parameters": result_stage2}]

    with open("model/" + pathString[distribution] + "/full_train/NN/" + str(TOTAL_NUMBER) + ".json", "w") as jsonFile:
        json.dump(result, jsonFile)

    # write performance into files
    performance_NN = {"type": "NN", "build time": learn_time, "search time": search_time, "average error": mean_error,
                      "store size": os.path.getsize(
                          "model/" + pathString[distribution] + "/full_train/NN/" + str(TOTAL_NUMBER) + ".json")}
    with open("performance/" + pathString[distribution] + "/full_train/NN/" + str(TOTAL_NUMBER) + ".json",
              "w") as jsonFile:
        json.dump(performance_NN, jsonFile)

    del trained_index
    gc.collect()
    
    # build BTree index
    print("*************start BTree************")
    bt = BTree(2)
    print("Start Build")
    start_time = time.time()
    bt.build(test_set_x, test_set_y)
    end_time = time.time()
    build_time = end_time - start_time
    print("Build BTree time ", build_time)
    err = 0
    print("Calculate error")
    start_time = time.time()
    for ind in range(len(test_set_x)):
        pre = bt.predict(test_set_x[ind])
        err += abs(pre - test_set_y[ind])
        if pre != test_set_y[ind]:
            # simulate the local search around the predicted position:
            # probe pre+1, pre-1, pre+2, ... until the true position is hit
            flag = 1
            pos = pre
            off = 1
            while pos != test_set_y[ind]:
                pos += flag * off
                flag = -flag
                off += 1
    end_time = time.time()
    search_time = (end_time - start_time) / len(test_set_x)
    print("Search time ", search_time)
    mean_error = err * 1.0 / len(test_set_x)
    print("mean error = ", mean_error)
    print("*************end BTree************")

    # write BTree into files
    result = []
    for ind, node in bt.nodes.items():
        items = []
        for ni in node.items:
            if ni is None:
                continue
            items.append({"key": ni.k, "value": ni.v})
        tmp = {"index": node.index, "isLeaf": node.isLeaf, "children": node.children, "items": items,
               "numberOfkeys": node.numberOfKeys}
        result.append(tmp)

    with open("model/" + pathString[distribution] + "/full_train/BTree/" + str(TOTAL_NUMBER) + ".json",
              "wb") as jsonFile:
        json.dump(result, jsonFile)

    # write performance into files
    performance_BTree = {"type": "BTree", "build time": build_time, "search time": search_time,
                         "average error": mean_error,
                         "store size": os.path.getsize(
                             "model/" + pathString[distribution] + "/full_train/BTree/" + str(TOTAL_NUMBER) + ".json")}
    with open("performance/" + pathString[distribution] + "/full_train/BTree/" + str(TOTAL_NUMBER) + ".json",
              "wb") as jsonFile:
        json.dump(performance_BTree, jsonFile)

    del bt
    gc.collect()
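A hedged example of how train_index might be invoked; the threshold value and the __main__ guard are illustrative, while the data path and Distribution.RANDOM come from the code and comments above.

if __name__ == "__main__":
    # threshold chosen arbitrarily for this sketch
    train_index(threshold=64,
                use_threshold=True,
                distribution=Distribution.RANDOM,
                path="data/random_t.csv")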