Example #1
def random_tree(depth=3, midrange=3, variation=2):
    """Build a random Tree in which each node gets midrange +/- variation children, down to the given depth."""
    tree = Tree()

    def random_name():
        return uuid.uuid4().hex

    def get_children():
        children = midrange + randint(-variation, variation)
        return children

    def create_children(node, depth, count=0):
        if depth <= 0:
            return
        for child in range(get_children()):
            new_node = Node(random_name())
            count += 1
            print(new_node.name, count, depth)
            node.adopt(new_node)
            tree.node_list.append(new_node)
            create_children(new_node, depth - 1, count)

    root = Node(random_name())
    tree.root = root
    tree.node_list.append(root)
    create_children(root, depth)
    print(tree.get_node_index(root))
    print([node.name for node in tree.node_list])
    return tree
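
The snippet above relies on uuid, randint, and a project-specific Node/Tree pair that is not shown. Below is a minimal sketch of the interface it assumes, inferred only from how the code uses it; each example on this page ships its own Tree class, so the real implementation may differ.

import uuid
from random import randint


class Node:
    """Minimal stand-in: a named node that keeps a list of children."""

    def __init__(self, name):
        self.name = name
        self.children = []

    def adopt(self, child):
        # Attach a child node, as random_tree() expects.
        self.children.append(child)


class Tree:
    """Minimal stand-in: a root pointer plus a flat registry of all nodes."""

    def __init__(self):
        self.root = None
        self.node_list = []

    def get_node_index(self, node):
        # Position of the node in the flat registry (the root is index 0).
        return self.node_list.index(node)

With these stand-ins in place, random_tree() runs as written: it prints each generated node, then the index of the root (0), then the names of every node in the tree.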
Example #2
    def find_next_move(self, board, current_player):
        """
            Define an end time which will act as a terminating condition
        """

        tree = Tree()
        rootNode = tree.get_root()
        rootNode.state.board = board
        rootNode.state.set_player(current_player)
        self.opponent = 3 - current_player

        move_epochs = 50000
        for _ in range(move_epochs):

            promising_node = self.select_promising_node(rootNode)

            if promising_node.get_state().get_board().check_game_state() == -1:
                self.expand_node(promising_node)

            nodeToExplore = promising_node

            if len(promising_node.get_children()) > 0:
                nodeToExplore = promising_node.get_random_child()

            playoutResult = self.simulate_random_playout(nodeToExplore)
            self.back_propogation(nodeToExplore, playoutResult)

        winnerNode = rootNode.get_child_with_max_score()
        tree.set_root(winnerNode)

        newBoard = deepcopy(winnerNode.get_state().get_board())
        return newBoard
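
This is a standard Monte Carlo Tree Search loop (select, expand, simulate, backpropagate). The selection step it relies on, select_promising_node, is not shown; a common way to implement it is UCT, sketched below. The accessors get_visit_count and get_win_score are assumptions that mirror the getter style used above, not confirmed parts of this project's API.

import math


def uct_value(parent_visits, win_score, visits, c=1.41):
    # Unvisited children are explored first; otherwise balance exploitation
    # (average score) against exploration (how rarely the child was visited).
    if visits == 0:
        return float('inf')
    return (win_score / visits) + c * math.sqrt(math.log(parent_visits) / visits)


def select_promising_node(root_node):
    # Walk down from the root, always following the child with the best UCT
    # value, until a node with no expanded children is reached.
    node = root_node
    while len(node.get_children()) > 0:
        parent_visits = node.get_visit_count()
        node = max(node.get_children(),
                   key=lambda child: uct_value(parent_visits,
                                               child.get_win_score(),
                                               child.get_visit_count()))
    return node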
Example #3
File: automota.py  Project: dbunin/Logic
    def parseString(self, toParse, parent=None):
        """
        Change to default comparing to the Object.

        Attributes:
            toParse (String): users inputed ASII string
            parent  (Tree): Node of a tree
        """
        komaIndex = toParse.find(',')
        if komaIndex == -1:
            notIndex = toParse.find('~')
            tempTree = None
            if notIndex != -1:
                if notIndex != 0:
                    raise ValueError
                else:
                    toParse = toParse.replace(')', '')
                    toParse = toParse.replace('(', '')
                    tempTree = Tree(toParse[0])
                    tempTree.addChild(Tree(toParse[1:]))
            else:
                toParse = toParse.replace(')', '')
                toParse = toParse.replace('(', '')
                tempTree = Tree(toParse)
            if self.tree is None:
                self.tree = tempTree
            else:
                parent.addChild(tempTree)
            return toParse
        else:
            # Advance komaIndex to the comma that separates the two top-level
            # operands: the number of '(' before it must exceed the number of
            # earlier ',' by exactly one.
            while toParse[:komaIndex + 1].count('(') != (
                    toParse[:komaIndex].count(',') + 1):
                newIndex = toParse[komaIndex + 1:].find(',')
                if newIndex == -1:
                    raise ValueError()
                komaIndex += newIndex + 1
            sign = toParse[0]
            tree = Tree(sign)
            if self.tree is None:
                self.tree = tree
            else:
                parent.addChild(tree)
            left = self.parseString(toParse[2:komaIndex], tree)
            right = self.parseString(toParse[komaIndex+1:-1], tree)
            if left == -1 or right == -1 or (
                    not tree.checkIfSignIsCorrect(sign)):
                raise ValueError()
Example #4
def id3(x, y, feature_list, impurity_measure="entropy", parent_node=None):
    """
    Creates a decision tree, with the id3 alogoritm, based on learning data
    :param x: dataset of items as list of list
    :param y: target values for items as list
    :param feature_list: available features for each iteration as list
    :param impurity_measure: impurity measure entropy or gini as string
    :param parent_node: parent for this node iteration as Tree (node representation), default None
    :return: root node for tree as type Tree (node representation)
    """
    x = np.array(x)
    uniques, counts = np.unique(y, return_counts=True)#Find unique target values

    if len(uniques) == 1:  # Set is pure, only one target value left in set
        return Tree(uniques[0], label=uniques[0])

    if len(feature_list) == 0:  # No more attributes to split on: return the majority label
        majority = uniques[np.argmax(counts)]
        return Tree(majority, label=majority)

    best_attribute = max_ig(x, y, feature_list, impurity_measure)

    vals, counts = np.unique(x[:, best_attribute], return_counts=True)  # Find unique attribute values
    this_node = Tree(best_attribute, parent_node)

    n_children = len(vals)

    feature_list.remove(best_attribute)
    children_labels = []
    for child in range(n_children):  # Create one child per unique attribute value
        x_for_this_child = []
        y_for_this_child = []
        for item in range(len(x)):  # Pass on every item whose attribute value matches this branch
            if x[item][best_attribute] == vals[child]:
                x_for_this_child.append(x[item])
                y_for_this_child.append(y[item])

        # Continue growing this child node
        new_child = id3(x_for_this_child, y_for_this_child, feature_list, impurity_measure, parent_node=this_node)
        new_child.set_was_split_on(vals[child])  # Record which attribute value this child was split on
        this_node.add_child(new_child)
        children_labels.append(new_child.label)

    label_list, label_count = np.unique(children_labels, return_counts=True)  # Counts of the children's labels
    label = label_list[np.argmax(label_count)]
    this_node.set_label(label)  # This node gets the most common label among its children

    return this_node
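
The helper max_ig is not shown. For reference, here is a hedged sketch of the entropy branch only, using hypothetical helper names; the project's own max_ig presumably also handles the gini case.

import numpy as np


def entropy(y):
    # H(y) = -sum p * log2(p) over the class distribution.
    _, counts = np.unique(y, return_counts=True)
    probs = counts / counts.sum()
    return -np.sum(probs * np.log2(probs))


def information_gain(x, y, feature):
    # Gain = H(y) minus the size-weighted entropy of y after splitting on the feature.
    values, counts = np.unique(x[:, feature], return_counts=True)
    weights = counts / counts.sum()
    conditional = sum(w * entropy(y[x[:, feature] == v])
                      for v, w in zip(values, weights))
    return entropy(y) - conditional


def max_ig(x, y, feature_list, impurity_measure="entropy"):
    # Pick the feature with the largest information gain.
    y = np.array(y)
    return max(feature_list, key=lambda f: information_gain(x, y, f))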
Example #5
    def test_parseString(self):
        testedClass = Automota()
        testTree = Tree('&')
        testTree.addChild(Tree('A'))
        testTree.addChild(Tree('~'))
        testTree.right.addChild(Tree('B'))
        testedClass.parseString('&(A,~B)')
        self.assertEqual(testTree, testedClass.tree)
Example #6
    def test_getRows(self):
        testTree = Tree('&')
        testTree.addChild(Tree('A'))
        testTree.addChild(Tree('~'))
        testTree.right.addChild(Tree('B'))
        testedClass = TruthTable(['A', 'B'], testTree)
        expectedValue = [['0', '0', '0'], ['0', '1', '0'], ['1', '0', '1'],
                         ['1', '1', '0']]
        self.assertEqual(expectedValue, testedClass.rows)
Example #7
    def test_findVariables(self):
        testedClass = Automota()
        testTree = Tree('&')
        testTree.addChild(Tree('A'))
        testTree.addChild(Tree('~'))
        testTree.right.addChild(Tree('B'))
        outArray = testedClass.findVariables(testTree)
        outArray.sort()
        self.assertEqual(outArray, ['A', 'B'])
Example #8
    def test_getValuesForVariables(self):
        testTree = Tree('&')
        testTree.addChild(Tree('A'))
        testTree.addChild(Tree('~'))
        testTree.right.addChild(Tree('B'))
        testedClass = TruthTable(['A', 'B'], testTree)
        expectedValue = [['0', '0'], ['0', '1'], ['1', '0'], ['1', '1']]
        values = testedClass.getValuesForVariables(['A', 'B'])
        self.assertEqual(expectedValue, values)
Example #9
    def getNormalForm(self, rows, variables):
        """
        Gets a normal form from a rows.

        Attributes:
            rows      (List): list of rows
            variables (List): list of variables.

        Returns:
            Tree: Normal form tree.
        """
        trees = []
        for row in rows:
            if row[-1] == '1':
                vars = []
                tempTrees = []
                for index, value in enumerate(row[:-1]):
                    if value != '*':
                        if value == '0':
                            vars.append('~' + variables[index])
                        elif value == '1':
                            vars.append(variables[index])
                for var in vars:
                    if '~' in var:
                        tempTree = Tree('~')
                        tempTree.addChild(Tree(var[1:]))
                        tempTrees.append(tempTree)
                    else:
                        tempTrees.append(Tree(var))
                trees.append(self.nodesToTree(tempTrees, '&'))
        return self.nodesToTree(trees, '|')
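
Each row whose result column is '1' becomes a conjunction of (possibly negated) variables, and those conjunctions are joined with '|', i.e. a disjunctive normal form. A hedged trace on hypothetical rows (table stands for an existing TruthTable instance, as in the tests below):

variables = ['A', 'B']
rows = [
    ['0', '*', '1'],   # A = 0, B is '*' and skipped  ->  term ~A
    ['1', '1', '1'],   # A = 1, B = 1                 ->  term A & B
    ['1', '0', '0'],   # result column is '0'         ->  row ignored
]
# table.getNormalForm(rows, variables) would return the tree for (~A) | (A & B).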
Example #10
    def nodesToTree(self, trees, sign):
        """
        Transforms a list of nodes to tree
        and connects the nodes with sign.

        Attributes:
            trees (List): list of rows
            sign  (List): sign to connect nodes with.

        Returns:
            Tree: object of type Tree.
        """
        while len(trees) != 1:
            tree1 = trees[0]
            tree2 = trees[1]
            trees.pop(0)
            trees.pop(0)
            newTree = Tree(sign)
            newTree.addChild(tree1)
            newTree.addChild(tree2)
            trees.append(newTree)
        return trees[0]
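
nodesToTree folds the list pairwise: it keeps popping the first two trees, joining them under a new sign node, and appending the result until one tree remains. A hedged trace, assuming the single-character Tree constructor used in the other snippets:

terms = [Tree('A'), Tree('B'), Tree('C')]
# nodesToTree(terms, '&') proceeds as follows:
#   pass 1: pop A and B, append &(A, B)   ->  [C, &(A, B)]
#   pass 2: pop C and &(A, B)             ->  [&(C, &(A, B))]
# The result is a single '&' tree with children C and (A & B); logically this is
# A & B & C, although the grouping is not a plain left-to-right fold.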
Example #11
    def test_simplify(self):
        testTree = Tree('|')
        testTree.addChild(Tree('A'))
        testTree.addChild(Tree('|'))
        testTree.right.addChild(Tree('B'))
        testTree.right.addChild(Tree('C'))
        testedClass = TruthTable(['A', 'B', 'C'], testTree)
        expectedValue = [
            ['0', '0', '0', '0'],
            ['*', '*', '1', '1'],
            ['*', '1', '*', '1'],
            ['1', '*', '*', '1'],
        ]
        values = testedClass.rows
        simplified_table = testedClass.simplify(values)
        self.assertEqual(expectedValue, simplified_table)
Example #12
    def test_traverseTree(self):
        testedClass = Automota()
        expectedOutput = [
            'node [ fontname = "Arial" ]',
            'node1 [ label = "=" ]',
            'node1 -- node2',
            'node2 [ label = "A" ]',
            'node1 -- node3',
            'node3 [ label = "~" ]',
            'node3 -- node4',
            'node4 [ label = "B" ]']
        inputTree = Tree('=')
        inputTree.addChild(Tree('A'))
        inputTree.addChild(Tree('~'))
        inputTree.right.addChild(Tree('B'))
        result = testedClass.traverseTree(inputTree)
        self.assertEqual(expectedOutput, result)
Example #13
    def fit(self, data, attributes, target_name):
        '''
            Build and return a decision tree using the ID3 algorithm
        '''

        data_target = data[target_name]

        # Data target contains one label
        entropy_data_target = Calculate.entropy(data_target)
        if entropy_data_target == 0:
            value_list = Calculate.get_unique_data(data, target_name)
            value_dict = dict()
            for key, value in value_list.items():
                value_dict[key] = len(value_list[key])

            # Set current_node, info_gain, values
            tree = Tree(
                Node(None,
                     entropy_data_target,
                     value_dict,
                     result=data_target[0],
                     is_leaf=True))
            return tree

        # No attributes left to choose from
        if len(attributes) == 0:
            # Set current_node, info_gain, values
            value_list = Calculate.get_unique_data(data, target_name)
            value_dict = dict()
            for key, value in value_list.items():
                value_dict[key] = len(value_list[key])

            tree = Tree(
                Node(None,
                     entropy_data_target,
                     value_dict,
                     result=Calculate.most_label(data_target),
                     is_leaf=True))
            return tree
        else:
            # Find the best attribute for this node, using either info gain or gain ratio
            best_attr = ''
            best_point = 0  # Could be Info gain or Gain ratio
            for attr in attributes:
                if self.gain_ratio:
                    point = Calculate.gain_ratio(data[attr], data_target)
                    if point > best_point:
                        best_point = point
                        best_attr = attr
                else:
                    point = Calculate.info_gain(data[attr], data_target)
                    if point > best_point:
                        best_point = point
                        best_attr = attr

            value_list = Calculate.get_unique_data(data, target_name)
            value_dict = dict()
            for key, value in value_list.items():
                value_dict[key] = len(value_list[key])

            # Build decision tree recursively
            dtree = Tree(Node(best_attr, best_point, value_dict))

            # Remove the chosen attribute from the available attributes
            attributes.remove(best_attr)

            # Scan every possible attribute value and generate a subtree for it
            list_attribute = Calculate.get_unique_data(data, best_attr)
            i = 0
            for attribute in list_attribute:
                data = pd.DataFrame(
                    data=list_attribute[attribute]).reset_index(drop=True)
                data.drop(best_attr, axis=1, inplace=True)
                dtree.add_child(self.fit(data, attributes, target_name))
                dtree.children[i].value.edge = attribute
                i += 1
            return dtree
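
Calculate.info_gain and Calculate.gain_ratio are not shown. For reference, here is a hedged sketch of both over pandas Series; the helper names and signatures are hypothetical and may not match the project's Calculate class.

import math


def entropy(series):
    # H(X) = -sum p * log2(p) over the value distribution of the Series.
    probs = series.value_counts(normalize=True)
    return -sum(p * math.log2(p) for p in probs)


def info_gain(attribute, target):
    # Gain = H(target) - sum_v P(attribute = v) * H(target | attribute = v)
    conditional = sum((group.size / target.size) * entropy(group)
                      for _, group in target.groupby(attribute))
    return entropy(target) - conditional


def gain_ratio(attribute, target):
    # Gain ratio normalizes info gain by the split information of the attribute,
    # which penalizes attributes with many distinct values.
    split_info = entropy(attribute)
    return info_gain(attribute, target) / split_info if split_info else 0.0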
Example #14
File: myC45.py  Project: kookka/py-dtree
    def __fit_without_prune(self, data, features, target):
        '''
            Build the entire decision tree without pruning
        '''

        continuous_features = list()
        discrete_features = list()
        for feature in features:
            if len(list(data[feature])) > 0:
                is_continue = self.is_attr_continue(list(data[feature]))
                if is_continue:
                    continuous_features.append(feature)
                else:
                    discrete_features.append(feature)

        if not continuous_features:
            return MyID3(self.gain_ratio).fit(data, features, target)

        # Continuous attribute

        # If only one target value exists
        entropy_data_target = Calculate.entropy(data[target])
        if entropy_data_target == 0:
            value_list = Calculate.get_unique_data(data, target)
            value_dict = dict()
            for key, value in value_list.items():
                value_dict[key] = len(value_list[key])

            return Tree(
                Node(
                    None,
                    0.0,  # Entropy must be 0 since only one value exists
                    value_dict,
                    result=data[target][0],
                    is_leaf=True))

        if (len(features) == 0):
            value_list = Calculate.get_unique_data(data, target)
            value_dict = dict()
            for key, value in value_list.items():
                value_dict[key] = len(value_list[key])
            return Tree(
                Node(None,
                     entropy_data_target,
                     value_dict,
                     result=Calculate.most_label(data[target]),
                     is_leaf=True))

        # Find best attribute and build tree recursively
        best_attr = ''
        best_point = 0
        is_discrete = False
        best_splitter = 0
        chosen_edge = list(['', ''])
        for feature in continuous_features:
            best_treshold = self.find_threshold(data[[feature]],
                                                data[[target]])
            if best_treshold[1] > best_point:
                best_attr = str(feature)
                chosen_edge[0] = best_attr + ' > ' + str(best_treshold[0])
                chosen_edge[1] = best_attr + ' <= ' + str(best_treshold[0])
                best_point = best_treshold[1]
                best_splitter = best_treshold[0]
        for feature in discrete_features:
            point = Calculate.info_gain(data[feature], data[target])
            if point > best_point:
                best_point = point
                best_attr = str(feature)
                is_discrete = True

        value_list = Calculate.get_unique_data(data, target)
        value_dict = dict()
        for key, value in value_list.items():
            value_dict[key] = len(value_list[key])
        dtree = Tree(Node(best_attr, best_point, value_dict))

        # Scan every possible attribute value and generate a subtree for it
        if is_discrete:
            list_attribute = Calculate.get_unique_data(data, best_attr)
        else:
            list_attribute = Calculate.split_by_threshold(
                data, best_attr, best_splitter)

        i = 0

        for attribute in list_attribute:
            data = pd.DataFrame(data=list_attribute[attribute]).reset_index(
                drop=True)
            dtree.add_child(self.__fit_without_prune(data, features, target))
            if is_discrete:
                dtree.children[i].value.edge = attribute
            else:
                dtree.children[i].value.edge = chosen_edge[i]
            i += 1

        return dtree
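
The find_threshold call, which picks the best binary split point for a continuous attribute, is not shown either. A common approach, sketched here under the assumption that it receives two single-column DataFrames (as in the call above) and returns a (threshold, gain) pair, is to try the midpoints between consecutive sorted values and keep the one with the highest information gain.

import math
from collections import Counter


def _entropy(labels):
    # Shannon entropy of an iterable of class labels.
    counts = Counter(labels)
    total = sum(counts.values())
    return -sum((c / total) * math.log2(c / total) for c in counts.values())


def find_threshold(feature_df, target_df):
    # Hedged sketch: returns (best_threshold, best_gain) for one continuous attribute.
    feature = feature_df.iloc[:, 0]
    target = target_df.iloc[:, 0]
    values = sorted(feature.unique())

    best_threshold, best_gain = None, 0.0
    for low, high in zip(values, values[1:]):
        threshold = (low + high) / 2.0
        left = target[feature <= threshold]
        right = target[feature > threshold]
        gain = _entropy(target) - (len(left) / len(target) * _entropy(left) +
                                   len(right) / len(target) * _entropy(right))
        if gain > best_gain:
            best_threshold, best_gain = threshold, gain
    return best_threshold, best_gain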
Example #15
from node import Node, Tree
from bfs import bfs_traversal
from dfs import dfs_traversal
from tree_traversals import in_order_traversal, pre_order_traversal, post_order_traversal

first_tree_dict = {
    "a": ("b", "c"),
    "b": ("d", "e"),
    "c": ("h", "i"),
    "d": ("f", "g")
}

other_tree_dict = {
    "a": ("b", "b"),
    "b": ("c", "c"),
    "c": ("d", "d"),
    "d": ("e", "e"),
    "e": ("f", "f")
}

if __name__ == "__main__":
    first_tree = Tree("a", first_tree_dict)
    other_tree = Tree("a", other_tree_dict)
    print(dfs_traversal(first_tree.get_root()))
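
dfs_traversal itself lives in the project's dfs module and is not shown. For reference, a depth-first walk can also be written directly against the child dictionaries above; this is a hedged stand-alone sketch, not the imported function.

def dfs_from_dict(tree_dict, root):
    # Pre-order depth-first traversal over a {node: (left, right)} dictionary.
    order, stack = [], [root]
    while stack:
        node = stack.pop()
        order.append(node)
        # Push the right child first so the left child is visited first.
        for child in reversed(tree_dict.get(node, ())):
            stack.append(child)
    return order


print(dfs_from_dict(first_tree_dict, "a"))  # ['a', 'b', 'd', 'f', 'g', 'e', 'c', 'h', 'i']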
Example #16
    def load_json(self, view, scene, points):
        """
        Фунция, выводящая openFileDialog и выполняющая загрузку указанного JSON файла с интерфейсом
        :param view: левый или правый graphicsView
        :param scene: одна из сцен, присвоенных graphicsView
        :param points: точки, описывающие положения тепловых зон на тепловой карте
        """
        file_name = QtWidgets.QFileDialog.getOpenFileName(
            self.main_window, 'Load file', './', "*.json")

        if file_name[0]:
            # Open the JSON file and read the data
            with open(file_name[0]) as data_file:
                data = json.load(data_file)

            # Create the root of the element tree
            root = Node(data["tagName"], data["id"], data["className"],
                        data["clientWidth"], data["clientHeight"],
                        data["clientTop"], data["clientLeft"])
            children = data["children"]
            tree = Tree(root)

            if view == self.ui.initialView:
                self.initial_tree = tree
                self.loaded_interfaces[0] = True
                if PRINT_INFO:
                    print(
                        "====================== INITIAL TREE ======================"
                    )
            else:
                self.optimized_tree = tree
                self.best_tree = copy.deepcopy(tree)
                self.loaded_interfaces[1] = True
                if PRINT_INFO:
                    print(
                        "====================== OPTIMIZED TREE ======================"
                    )

            # Fill the tree and print its structure
            self.identifier = 1
            self.fill_tree(root, children)
            if PRINT_INFO:
                tree.draw_tree()
                print()

            # Draw the tree elements on the QGraphicsView
            self.draw_interface(view, scene, tree, points)

            # Reset the statistics
            self.count_iterations = 0
            self.count_useless_iterations = 0

            # Enable the display control buttons
            self.enable_check_buttons()

            # If both interfaces are loaded, enable the optimization algorithm control buttons
            if self.loaded_interfaces[0] and self.loaded_interfaces[1]:
                self.enable_algorithm_buttons()
                self.initial_energy = FitnessFunctions.get_energy(
                    self.initial_tree, self.ui.optimizedView.width(),
                    self.ui.optimizedView.height())
Example #17
    Return the text_surface, text's start position and height as a tuple.'''

    white = (255, 255, 255)
    font = pygame.font.Font(None, 30)
    text_surface = font.render(previous_filename, 1, white)

    text_h = font.get_height()
    text_w = font.get_linesize()
    text_pos = (0, (screen_size[1] - 1 - text_h - previous_height))

    return (text_surface, text_pos, text_h)


if __name__ == '__main__':

    tree = Tree()

    # ask the user to choose a directory
    d = media.choose_folder()
    tree.insert_directory((d, os.path.getsize(d)))

    # ask the user to choose whether file is colored according to filetype
    color = raw_input("Do you want files colored according to \
their filetype?(yes or no)")
    while color != "yes" and color != "no":
        color = raw_input("Do you want files colored according \
to their filetype?(yes or no)")
    
    # ask the user whether to use gradient choice
    gradient = raw_input("Do you want files colored plain \
    or gradient? (p or g)\n" )
Example #18
    def test_getNormalForm(self):
        testTree = Tree('>')
        testTree.addChild(Tree('A'))
        testTree.addChild(Tree('B'))
        testedClass = TruthTable(['A', 'B'], testTree)
        values = testedClass.rows
        simplified_table = testedClass.simplify(values)
        normalForm = testedClass.getNormalForm(simplified_table, ['A', 'B'])
        expectedTree = Tree('|')
        expectedTree.addChild(Tree('~'))
        expectedTree.addChild(Tree('B'))
        expectedTree.left.addChild(Tree('A'))
        self.assertEqual(expectedTree, normalForm)