def build_tree(self, training_data, limit=None):
        """
        Recursive function to build decision tree

        Parameters:
        training_data - Pandas DataFrame; last column is taken as the class labels

        Keyword Args:
        limit - Maximum remaining depth of the tree; None (no limit) by default
        """
        node = AnyNode()

        # Data is pure, create leaf of tree with class label
        if len(set(training_data.iloc[:, -1])) == 1:
            node.label = training_data.iloc[0, -1]
            return node

        # No more features to split on; use most common label as class label
        if len(training_data.columns) == 1:
            node.label = max(set(training_data.iloc[:, -1]),
                             key=list(training_data.iloc[:, -1]).count)
            return node

        # Default; begin tree splitting
        # Determine feature that gives best information gain
        split_feature = max(
            training_data.columns[0:-1],
            default=training_data.columns[0],
            key=lambda x: info_gain(training_data, x, self.bins))
        node.attribute = split_feature

        # Lookup possible values for splitting feature and
        # create leaves/subtrees
        values = self.values[split_feature]
        for value in values:
            # Create subset with feature removed
            training_data_v = subset_by_value(training_data, split_feature,
                                              value)
            training_data_v = training_data_v.drop(split_feature, axis=1)

            # Stop recursing if the subset is empty or the depth limit
            # has been reached
            if training_data_v.empty or (limit is not None and limit < 1):
                # create a child leaf labelled with the most common
                # class label of the current data
                child = AnyNode()
                child.label = max(set(training_data.iloc[:, -1]),
                                  key=list(training_data.iloc[:, -1]).count)
                child.value = value
            else:
                # subset is not empty; create child subtree recursively
                new_limit = None if limit is None else limit - 1
                child = self.build_tree(training_data_v, new_limit)
                child.value = value

            # Attach the child (which records its split value) to the
            # current node
            child.parent = node

        return node
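
The snippet above relies on helpers that are not shown here (info_gain, subset_by_value) and on per-feature lookups kept in self.values and self.bins. As a rough, hypothetical sketch of what an entropy-based info_gain could look like (the real helper may differ, in particular in how bins is applied to numeric features):

import numpy as np
import pandas as pd

def entropy(labels):
    # Shannon entropy of a pandas Series of class labels
    probs = labels.value_counts(normalize=True)
    return float(-(probs * np.log2(probs)).sum())

def info_gain(data, feature, bins=None):
    # Hypothetical stand-in for the helper used above: information gain
    # obtained by splitting `data` (last column = class labels) on `feature`.
    # `bins` is assumed to control discretisation of numeric features.
    labels = data.iloc[:, -1]
    values = data[feature]
    if bins is not None and pd.api.types.is_numeric_dtype(values):
        values = pd.cut(values, bins)
    weighted_entropy = 0.0
    for _, group in data.groupby(values):
        weighted_entropy += len(group) / len(data) * entropy(group.iloc[:, -1])
    return entropy(labels) - weighted_entropy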
Example #2
	def build_move_tree(self, node: AnyNode, next_player_color: PlayerColor, max_depth: int):
		is_root = node.level == 1  # the root node (level 1) represents a null move that cannot be executed or evaluated
		opponent_turn = node.level % 2 == 1
		if not is_root:
			self.evaluate_move(node.data)
			if node.level == max_depth or node.data.value == WINNING_MOVE_VALUE:
				# we've reached a leaf of the tree; assign the node value and finish the recursion
				node.value = -node.data.value if opponent_turn else node.data.value
				return

			self.execute(node.data)

		next_moves = self.get_all_possible_moves(next_player_color)
		if len(next_moves) == 0:
			# the next player cannot move, so this node is a winning node
			node.value = -WINNING_MOVE_VALUE if opponent_turn else WINNING_MOVE_VALUE
		else:
			for next_move in next_moves:
				next_move_node = AnyNode(parent = node, data = next_move, level = node.level + 1)
				self.build_move_tree(next_move_node, next_player_color.get_opposite(), max_depth)

		if not is_root:
			self.undo_last_move()
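
build_move_tree mutates the tree in place under the given root. To eyeball the result, anytree's RenderTree can print the generated nodes; game and PlayerColor.WHITE below are placeholders for the surrounding game object and colour value, which are not part of this example:

from anytree import AnyNode, RenderTree

root = AnyNode(id='Root', level=1)
game.build_move_tree(root, PlayerColor.WHITE, max_depth=2)  # `game` and `PlayerColor.WHITE` are assumed
for pre, _, node in RenderTree(root):
    # leaves carry a value; inner nodes only get one after the minimax pass
    print(f"{pre}{getattr(node, 'data', 'Root')} value={getattr(node, 'value', None)}")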
Example #3
	def get_best_move_minimax(self, player_color: PlayerColor) -> Move:
		root = AnyNode(id = 'Root', level = 1)
		self.build_move_tree(root, player_color, max_depth = 4)

		# we have the move tree with evaluated leaves; now compute the values of the inner nodes
		root.value = calculate_value_by_minimax(root)

		# collect the moves whose value matches the best value, i.e. the value of the root node
		best_moves = []
		for node in root.children:
			if node.value == root.value:
				best_moves.append(node.data)

		return random.choice(best_moves)
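
calculate_value_by_minimax is another helper that is not shown in this example. A minimal, hypothetical sketch of a classic minimax pass over the anytree structure might look like the following; note that Example #2 already negates leaf values on opponent turns, so the real helper may instead take the maximum at every level (negamax-style) rather than alternating max/min as sketched here:

def calculate_value_by_minimax(node, maximizing=True):
    # Hypothetical sketch: leaves keep the value assigned while the tree was built
    if not node.children:
        return node.value
    child_values = [calculate_value_by_minimax(child, not maximizing)
                    for child in node.children]
    # store each child's value so callers can compare it with the root value,
    # as get_best_move_minimax does above
    for child, value in zip(node.children, child_values):
        child.value = value
    return max(child_values) if maximizing else min(child_values)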