Example #1
0
    def __init__(self, phytomerNumber, rank, state, treeInstance, day, month,
                 year, TT, initiationday, initiationmonth, initiationyear,
                 initiationTT):
        """Create a phytomer and its three organs (leaf, bunch, internode).

        day/month/year and TT record the appearance date in calendar and
        thermal time; the initiation* parameters record the initiation date.
        """
        #self.name = "Phytomer_" + repr(phytomerNumber)
        self.name = phytomerNumber

        self.rank = rank
        self.state = state
        self.tree = treeInstance
        self.appearanceDay = day
        self.appearanceMonth = month
        self.appearanceYear = year
        self.appearanceTT = TT
        self.initiationDay = initiationday
        self.initiationMonth = initiationmonth
        self.initiationYear = initiationyear
        self.initiationTT = initiationTT
        # Death date fields stay zeroed until the phytomer dies.
        self.deathday = 0
        self.deathmonth = 0
        self.deathyear = 0
        self.deathTT = 0
        self.leaf = Leaf(self)
        self.bunch = Bunch(self)
        self.internode = Internode(self)
        self.demand = 0
        # Simulation step at which this phytomer appeared.
        self.step_apparition = self.tree.simulation.step
Example #2
0
 def __init__(self, d):
     """Create a chromosome: an expression tree of depth at most *d*."""
     # d is the max depth for the tree
     self.maxDepth = d
     self.fitness = 0
     self.root = Leaf() #root is of type Leaf
     self.root.init(d)
     # Variable names the evolved expression may reference.
     # NOTE(review): 'val4' appears twice -- confirm whether a name was skipped.
     self.header = ['val1', 'val2', 'val3', 'val4', 'val4', 'val5', 'val6', 'val7', 'val8']
Example #3
0
    def __init__(self, s_input):
        """Build a Huffman-style tree from the characters of *s_input*."""
        self.root = Leaf(is_root=True)
        # One leaf per distinct character, weighted by its frequency.
        self.leaves = [
            Leaf(is_root=False, weight=count, char=symbol)
            for symbol, count in Counter(s_input).items()
        ]
        self.build_tree()
        print(self)
Example #4
0
    def trickle(self, vector):
        """Gets a vector and hands it down to the closest child, checks for split afterwards."""
        # Refresh CF vector before routing the new point.
        self.update_cf(vector)

        target = self.closest(vector, self.children)

        if not target:
            # No suitable child: grow a fresh leaf, feed it, then attach it.
            fresh = Leaf()
            fresh.trickle(vector)
            self.add_node(fresh)
        else:
            target.trickle(vector)
Example #5
0
        def addChild(parent, depth):
            # Recursively build one subtree of the topology: levels above
            # DEPTH become Datacentre nodes, the bottom level becomes Leaf
            # nodes, and every new node is wired to its parent via a Link.
            if (depth < DEPTH):
                print "%s () [%s]" % ('\t' * depth,
                                      'DC' + str(len(datacentres)))
                child = Datacentre('DC' + str(len(datacentres)), self.env,
                                   sizeStruct[depth], self.applications)
                datacentres[child.getName()] = child

                # childStruct[depth] children at the next level down.
                for childNbr in range(childStruct[depth]):
                    addChild(child, depth + 1)

            else:
                child = Leaf(str(len(leafs)),
                             self.env)  # Change workload to LEAF#
                leafs[child.getName()] = child
                print "%s X [%s]" % ('\t' * depth, child.getName())

            # Connect child <-> parent through a capacity-100 link.
            link = Link('LINK' + str(len(links)), self.env,
                        Link.RESOURCE_TYPES['M'], 100, self.applications)
            links[link.getName()] = link
            link.addPeer(child)
            link.addPeer(parent)

            parent.addPeer(link)
            child.addPeer(link)
Example #6
0
    def _make_leaf(self, dataset):
        """Wrap *dataset* in a Leaf; on failure, log and return None."""
        try:
            leaf = Leaf(dataset)
        except Exception as exc:
            logger.error(
                "Something failed while making a leaf:\n{}".format(str(exc)))
        else:
            return leaf
Example #7
0
 def build_tree(self):
     """Merge the two lightest leaves under a new parent leaf."""
     left, right = self.get_two_lower_leaves()
     parent = Leaf(weight=left.weight + right.weight,
                   left_child=left, right_child=right)
     self.leaves.append(parent)
     left.parent = parent
     right.parent = parent
    def ID3(self, depth, s, attribute=None, label=None, alg='info'):
        '''
        Recursively build a decision tree over the example set *s*.

        :param depth: current recursion depth, capped at self.maxDepth
        :param s: set of examples
        :param attribute: the set of measured attributes (always passed as
            None by the recursive calls below)
        :param label: the target attribute (the prediction)
        :param alg: split criterion forwarded to findBestAttribute
        :return: a Leaf for a capped/pure branch, otherwise a Node
        '''

        # Depth cap reached: predict the majority label of what is left.
        if (depth == self.maxDepth):
            return Leaf(self.getMostCommonLabel(s))

        allLabelsMatch = self.doAllLabelsMatch(s)
        if allLabelsMatch == True:
            label = s[0].getLabel()
            if label is not None:
                return Leaf(label)
            else:
                # Most common label
                mostCommonLabel = self.getMostCommonLabel(s)
                return Leaf(mostCommonLabel)
        else:
            newNode = Node()

            # Find an attribute that best splits the data using information gain
            (bestAttributeIndex, __) = self.findBestAttribute(s, alg)
            bestAttribute = self.attributeList[bestAttributeIndex]
            newNode.splittingAttr = bestAttributeIndex
            #attrDict = getTypeCountDict(s,bestAttributeIndex)
            # One child per possible value of the chosen attribute.
            for value in bestAttribute.values:
                #subsets.append(getExampleSubset(s, bestAttributeIndex, value))
                subset = getExampleSubset(s, bestAttributeIndex, value)
                if len(subset) > 0:
                    newNode.children[value] = self.ID3(depth + 1, subset, None,
                                                       None, alg)
                else:
                    # Empty partition: fall back to the parent's majority label.
                    mostCommonLabel = self.getMostCommonLabel(s)
                    newNode.children[value] = Leaf(mostCommonLabel)

            #for subset in subsets:
            #    newNode.children.append(self.ID3(subset,None,None,alg))

            return newNode

            # Create a subset for each attribute value for the best attribute
            '''
Example #9
0
    def crossover(self, ch1, ch2):
        """Build a child chromosome by grafting a subtree of ch2 into ch1."""
        cut_point = choice(ch1.root.getLeafs())
        graft = choice(ch2.root.getLeafs())
        child = Chromosome(self.maxDepth)

        if ch1.root == cut_point:
            # The whole tree was selected: the child is a copy of the graft.
            child.root = graft.deepcopy()
        else:
            child.root = Leaf()
            child.root.change(ch1.root, cut_point, graft)
        return child
Example #10
0
class Chromosome:
    """A GP individual: an expression tree (self.root) plus cached fitness."""

    def __init__(self, d):
        # d is the max depth for the tree
        self.maxDepth = d
        self.fitness = 0
        self.root = Leaf()  # root is of type Leaf
        self.root.init(d)
        # Variable names the evolved expression may reference.
        # NOTE(review): 'val4' appears twice -- kept for compatibility;
        # confirm whether a distinct name was intended.
        self.header = ['val1', 'val2', 'val3', 'val4', 'val4', 'val5', 'val6', 'val7', 'val8']

    def fitness_eval(self, X, Y):
        """Mean absolute error of the tree's expression over the dataset.

        Each row of X is bound to the variable names in self.header, the
        expression str(self.root) is evaluated, and |result - y| is averaged.
        Returns (and caches in self.fitness) the mean error.
        """
        self.fitness = 0
        exp = str(self.root)
        cnt = 0
        for (x, y) in zip(X, Y):
            cnt += 1
            if cnt % 100 == 0:
                print(cnt)
            # Bind feature values to their names in an explicit scope.
            # The previous exec()-into-locals approach is documented as
            # unsupported for function locals and worked only by CPython
            # accident; it also round-tripped values through str().
            scope = dict(zip(self.header, x))
            # SECURITY: eval() is safe only while the expression comes from
            # this program's own tree, never from untrusted input.
            res = eval(exp, globals(), scope)
            self.fitness += abs(res - float(y))
        self.fitness = self.fitness / len(X)
        return self.fitness

    def crossover(self, ch1, ch2):
        """Return a child made by grafting a random subtree of ch2 into ch1."""
        node1 = choice(ch1.root.getLeafs())
        node2 = choice(ch2.root.getLeafs())
        c = Chromosome(self.maxDepth)

        if ch1.root == node1:
            # The whole tree was selected: child is a copy of the graft.
            c.root = node2.deepcopy()
        else:
            c.root = Leaf()
            c.root.change(ch1.root, node1, node2)
        return c

    def mutate(self, prob):
        """Mutate the node at a random position with probability *prob*."""
        pos = randint(1, self.root.size)
        self.root.mutate(pos, prob)
def decision_tree(examples, attributes, parent_examples, weights=None):
    """
    The method to construct the decision tree
    :param examples: rows still under consideration
    :param attributes: attributes still available for splitting
    :param parent_examples: the caller's rows, used when examples run out
    :param weights: accepted for interface compatibility; unused here
    :return: Decision Tree
    """
    # Base cases: (almost) exhausted rows, a pure node, or no attribute left.
    if len(examples) == 1:
        return Leaf(plurality_value(parent_examples, attributes))
    if all_same_classification(examples):
        # All rows share one class here, so any row's label works.
        return Leaf(examples[1][0])
    if len(attributes) == 1:
        return Leaf(plurality_value(examples, attributes))

    target_entropy_val = find_target_entropy(examples)
    best = choose_best_attribute(examples, attributes, target_entropy_val)
    best_index = attributes.index(best)
    node = DTree(best, None, None, None, None)
    true_data, false_data = partition(examples, best, best_index, examples[0])

    # Drop the chosen attribute; each branch receives its own list copy.
    remaining = attributes[:best_index] + attributes[best_index + 1:]
    node.true_branch = decision_tree(true_data, remaining[:], examples)
    node.false_branch = decision_tree(false_data, remaining[:], examples)
    return node
    def build_tree(rows):
        """Recursively grow the tree, stopping where no split gains anything."""
        gain, question = DecisionTreeClassifier.find_best_split(rows)

        # Zero gain: nothing useful to ask, so these rows become a leaf.
        if gain == 0:
            return Leaf(rows)

        true_rows, false_rows = DecisionTreeClassifier.partition(
            rows, question)

        # Grow both sides, then record the split that produced them.
        return Decision_Node(
            question,
            DecisionTreeClassifier.build_tree(true_rows),
            DecisionTreeClassifier.build_tree(false_rows))
Example #13
0
    def __init__(self, n, m):
        """Create an n x m space and BSP-split it until no leaf splits."""
        self.list_leafs = [Leaf(n, m, 0, 0)]

        splitting = True
        while splitting:
            splitting = False
            next_leafs = []
            for node in self.list_leafs:
                # Oversized leaves must try to split; others try with 75% chance.
                if (node.width > node.max_size or node.height > node.max_size
                        or random.random() > 0.25):
                    if node.split():
                        next_leafs.append(node.lchild)
                        next_leafs.append(node.rchild)
                        splitting = True
                    else:
                        next_leafs.append(node)
                else:
                    next_leafs.append(node)

            self.list_leafs = next_leafs

        print("binary splitted space created")
Example #14
0
def build_tree(rows):
    """
	The tutorial tree
	Builds the tree recursively.

    Rules of recursion: 1) Believe that it works. 2) Start by checking
    for the base case (no further information gain). 3) Prepare for
    giant stack traces.

    """
    # Partition on each unique attribute, compute the information gain,
    # and keep the question with the highest gain.
    gain, question = find_best_split(rows, header)

    # Base case: nothing left to ask -- these rows become a leaf.
    if gain == 0:
        return Leaf(rows)

    # A useful feature/value was found; split on it and recurse both ways.
    true_rows, false_rows = partition(rows, question)
    return DecisionNode(question,
                        build_tree(true_rows),
                        build_tree(false_rows))
def build_tree(data, treshold, classic):
    """Builds the tree."""
    # Pick the split-selection strategy; 1 selects the classic criterion.
    if classic == 1:
        gain, question = classic_choose_split(data, treshold)
    else:
        gain, question = choose_split(data, treshold)

    # An infinite gain marks "no usable question left": emit a leaf.
    if gain == float('Inf'):
        return Leaf(data)

    true_data, false_data = partition(data, question)

    # Recurse down both sides and record the split.
    return Decision_Node(question,
                         build_tree(true_data, treshold, classic),
                         build_tree(false_data, treshold, classic))
Example #16
0
    def iterative_build_tree(self, rows, header, maxNodes):
        """Build a decision tree iteratively under a node budget.

        Starts from a single root, then repeatedly expands the pending node
        with the highest gain; once *maxNodes* is reached, every node still
        pending is frozen into a Leaf.  Returns the root (a DecisionNode, or
        a Leaf if the data allows no split at all); also records the final
        node count in self.nodes.
        """
        currentNodes = 1  #the first node is the root.

        gain, question = find_best_split(rows, header)

        # Since we can ask no further questions,
        # we'll return a leaf.
        if gain == 0:
            return Leaf(rows)
        else:  # now starts the real problem
            root = Node(question, gain, rows, None,
                        None)  #The root is a node now

            true_rows, false_rows = partition(root.rows, root.question)

            gainT, questionT = find_best_split(
                true_rows,
                header)  #finds the best gini for both the false and the true
            gainF, questionF = find_best_split(false_rows, header)

            true_branch = None
            false_branch = None
            nodes_to_split = list()

            # The root is re-bound as a DecisionNode; the Node above was only
            # needed to run the first partition.
            root = DecisionNode(question, None, None)

            if (gainT == 0
                ):  # Check if the gain is 0... in that case that's a leaf
                true_branch = Leaf(true_rows)
            else:
                true_branch = Node(questionT, gainT, true_rows, root, True)
                nodes_to_split.append(true_branch)

            root.true_branch = true_branch
            if (gainF == 0
                ):  # Check if the gain is 0... in that case that's a leaf
                false_branch = Leaf(false_rows)
            else:
                false_branch = Node(questionF, gainF, false_rows, root, False)
                nodes_to_split.append(false_branch)

            root.false_branch = false_branch

            currentNodes += 2

            #the number of nodes are not the max means that i can still partitionate if there are nodes that allows that
            #if the nodes to split are==0 means that there are not nodes to partitionate
            while (currentNodes < maxNodes and (not len(nodes_to_split) == 0)):
                # find the best gain from all the nodes. should be sorted?
                # NOTE(review): 'max' shadows the builtin here; harmless in
                # this scope but worth renaming in a behavior-changing pass.
                max = 0
                bestNodeIndex = 0
                for i in range(0, len(nodes_to_split)):

                    if (nodes_to_split[i].gain > max):
                        max = nodes_to_split[i].gain
                        bestNodeIndex = i

                #Now that we have the node with the best gain, we should partition as we did with the root
                i = bestNodeIndex

                true_rows, false_rows = partition(nodes_to_split[i].rows,
                                                  nodes_to_split[i].question)
                gainT, questionT = find_best_split(
                    true_rows, header
                )  #finds the best gini for both the false and the true
                gainF, questionF = find_best_split(false_rows, header)

                if (nodes_to_split[i].isATrueChild
                    ):  #the node has to stay on the true_branch

                    # Replace the pending node with a real DecisionNode, then
                    # hang its two children (leaves or further pending nodes).
                    nodes_to_split[i].father.true_branch = DecisionNode(
                        nodes_to_split[i].question, None, None)
                    if (gainT == 0):
                        nodes_to_split[
                            i].father.true_branch.true_branch = Leaf(true_rows)
                    else:
                        true_branch = Node(
                            questionT, gainT, true_rows,
                            nodes_to_split[i].father.true_branch, True)
                        nodes_to_split[
                            i].father.true_branch.true_branch = true_branch
                        nodes_to_split.append(true_branch)

                    if (gainF == 0):
                        nodes_to_split[
                            i].father.true_branch.false_branch = Leaf(
                                false_rows)
                    else:
                        false_branch = Node(
                            questionF, gainF, false_rows,
                            nodes_to_split[i].father.true_branch, False)
                        nodes_to_split[
                            i].father.true_branch.false_branch = false_branch
                        nodes_to_split.append(false_branch)

                else:  #the node has to stay on the false_branch of the father
                    nodes_to_split[i].father.false_branch = DecisionNode(
                        nodes_to_split[i].question, None, None)
                    if (gainT == 0):
                        nodes_to_split[
                            i].father.false_branch.true_branch = Leaf(
                                true_rows)
                    else:
                        true_branch = Node(
                            questionT, gainT, true_rows,
                            nodes_to_split[i].father.false_branch, True)
                        nodes_to_split[
                            i].father.false_branch.true_branch = true_branch
                        nodes_to_split.append(true_branch)

                    if (gainF == 0):
                        nodes_to_split[
                            i].father.false_branch.false_branch = Leaf(
                                false_rows)
                    else:
                        false_branch = Node(
                            questionF, gainF, false_rows,
                            nodes_to_split[i].father.false_branch, False)
                        nodes_to_split[
                            i].father.false_branch.false_branch = false_branch
                        nodes_to_split.append(false_branch)

                # Deleting by index is safe: the appends above only extend
                # the tail, so index i still refers to the expanded node.
                del nodes_to_split[
                    i]  #delete the now decision Node from the list of Nodes to split
                currentNodes += 2
            '''
            Two cases remain here:
            1) the max number of nodes was reached: any node still in the
               list has to become a leaf;
            2) the node list is empty: there were no more questions to ask.
            Both are handled by checking the list length below.
            '''
            if (len(nodes_to_split) > 0):
                for node in nodes_to_split:
                    if (node.isATrueChild == True):
                        node.father.true_branch = Leaf(node.rows)
                    else:
                        node.father.false_branch = Leaf(node.rows)

            #print("Number of total node (inner included):"+ str(currentNodes))
            self.nodes = currentNodes
            return root
    def __init__(self, LexNode, value=None):
        """Initialise the Leaf base with LexNode; optionally store a value."""
        Leaf.__init__(self, LexNode)

        self.leafValue = value
def _new_leaf(parent, object, name='name', icon=None):
    # Build a Leaf for *object* under *parent*, labelled by object[name].
    # NOTE(review): the parameter 'object' shadows the builtin of the same
    # name; renaming would break keyword callers, so it is left as-is.
    return Leaf(parent, object[name], object, icon)
Example #19
0
#

from Component import Component
from Leaf import Leaf
from Composite import Composite

def client_code(component: Component):
    # Works with any Component -- leaf or composite -- via operation().
    print(f"RESULT: {component.operation()}", end = "")

def client_code2(composite: Component, component: Component):
    # Only composites accept children; for a leaf the add is skipped.
    if composite.is_composite():
        composite.add(component)
    print(f"RESULT: {component.operation()}", end = "")

if __name__ == "__main__":
    # A lone Leaf already satisfies the client code...
    simple = Leaf()
    print("Client: I've got a simple component:")
    client_code(simple)
    print("\n")

    # ...and so does a composite tree of leaves.
    tree = Composite()
    left_branch = Composite()
    left_branch.add(Leaf())
    left_branch.add(Leaf())

    right_branch = Composite()
    right_branch.add(Leaf())

    tree.add(left_branch)
    tree.add(right_branch)
    print("Client: Now I've got a composite tree:")
Example #20
0
    def __init__(self, LexNode, value=None):
        """Initialise the Leaf base with LexNode; optionally store a value."""
        Leaf.__init__(self, LexNode)

        self.leafValue = value
Example #21
0
        """
    """
        Dank der Tatsache,  dass die Operationen zur Verwaltung der untergeordneten Komponenten 
        in der Basisklasse "Component"  deklariert sind, kann der Client-Code mit jeder beliebigen Komponente,
        ob einfach oder komplex, arbeiten, ohne von ihren konkreten Klassen abhängig zu sein.
        """
    if component1.is_composite():
        component1.add(component2)

    print(f"RESULT: {component1.operation()}", end="")


if __name__ == "__main__":
    # This way the client code can support the simple leaf components...
    print("Start")
    simple = Leaf()
    print("Client: I've got a simple component:")
    client_code(simple)
    print("\n")

    # ...as well as the complex composites.
    tree = Composite()

    branch1 = Composite()
    branch1.add(Leaf())
    branch1.add(Leaf())

    branch2 = Composite()
    branch2.add(Leaf())

    # NOTE(review): the snippet appears truncated here -- branch2 is built
    # but never attached in the visible code.
    tree.add(branch1)
            if xmid - 4 < posx < xmid + 4:
                if len(row_item.children) > 0:
                    row_item.collapsed = not row_item.collapsed
                    self.parent.setData(self.parent.root.toList())
                return True
        return False


if __name__ == '__main__':

    app = QApplication(sys.argv)

    table = LargeTree()
    # Sample (artist, album, title) rows; unused once the library loads.
    items = [("art", "abm1", "ttl1"), ("art", "abm1", "ttl2"),
             ("art", "abm2", "ttl3"), ("art2", "abm3", "ttl4")]

    from Song_Object import *
    import Song_LibraryFormat
    # NOTE(review): hard-coded absolute path -- only works on the author's
    # machine.
    song_list = Song_LibraryFormat.musicLoad_LIBZ(
        r"D:\Dropbox\ConsolePlayer\user\music.libz")

    # Presumably groups songs by artist then album, titles as the leaves.
    leaf = Leaf.items_to_tree([EnumSong.ARTIST, EnumSong.ALBUM],
                              lambda x: x[EnumSong.TITLE], song_list)
    p = leaf
    # Wrap the tree in ten extra single-child levels (depth test, apparently).
    for i in range(10):
        p = Leaf(p, "-%d" % i, [])
    table.setRoot(leaf)
    table.container.resize(640, 320)
    table.container.show()

    sys.exit(app.exec_())
Example #23
0
 def __init__(self, name, description):
     """Initialise the Leaf base with this component's name and description."""
     # Fix: the original was a SyntaxError -- the comma between the two
     # arguments was missing ("name description").
     Leaf.__init__(self, name, description)
            row_item = self.parent.data[row_index]
            if row_item == None: return False;
            depth = row_item.depth
            offset = ((depth - (not self.bool_display_root) ) * self.paint_control_width)/2   
            xmid = self.paint_control_width/2 +(offset)
            if xmid - 4 < posx < xmid + 4:
                if len(row_item.children) > 0:
                    row_item.collapsed = not row_item.collapsed
                    self.parent.setData( self.parent.root.toList() )
                return True
        return False
if __name__ == '__main__':

    app = QApplication(sys.argv)

    table = LargeTree()
    # Sample (artist, album, title) rows; unused once the library loads.
    items = [ ("art","abm1","ttl1"), ("art","abm1","ttl2"), ("art","abm2","ttl3"), ("art2","abm3","ttl4") ]

    from Song_Object import *
    import Song_LibraryFormat
    # NOTE(review): hard-coded absolute path -- author-machine only.
    song_list = Song_LibraryFormat.musicLoad_LIBZ(r"D:\Dropbox\ConsolePlayer\user\music.libz")

    # Presumably groups songs by artist then album, titles as the leaves.
    leaf = Leaf.items_to_tree([EnumSong.ARTIST,EnumSong.ALBUM],lambda x : x[EnumSong.TITLE], song_list)
    p = leaf
    # Wrap the tree in ten extra single-child levels (depth test, apparently).
    for i in range(10):
        p=Leaf(p,"-%d"%i,[])
    table.setRoot(leaf)
    table.container.resize(640,320)
    table.container.show()

    sys.exit(app.exec_())
Example #25
0
class Phytomer(object):
    """One phytomer (growth unit) of a palm: a leaf, a bunch and an
    internode, with appearance/initiation dates in calendar and thermal
    time (TT)."""

    def __init__(self, phytomerNumber, rank, state, treeInstance, day, month,
                 year, TT, initiationday, initiationmonth, initiationyear,
                 initiationTT):
        """Create the phytomer and its three organs.

        state is compared against 'DEAD' / 'INACTIVE' / 'ACTIVE' in
        ActiveOrgans; TT values are thermal-time sums.
        """
        #self.name = "Phytomer_" + repr(phytomerNumber)
        self.name = phytomerNumber

        self.rank = rank
        self.state = state
        self.tree = treeInstance
        self.appearanceDay = day
        self.appearanceMonth = month
        self.appearanceYear = year
        self.appearanceTT = TT
        self.initiationDay = initiationday
        self.initiationMonth = initiationmonth
        self.initiationYear = initiationyear
        self.initiationTT = initiationTT
        # Death date fields stay zeroed until the phytomer dies.
        self.deathday = 0
        self.deathmonth = 0
        self.deathyear = 0
        self.deathTT = 0
        self.leaf = Leaf(self)
        self.bunch = Bunch(self)
        self.internode = Internode(self)
        self.demand = 0
        # Simulation step at which this phytomer appeared.
        self.step_apparition = self.tree.simulation.step

    # Plain accessors, kept for existing callers.
    def getName(self):
        return self.name

    def getLeaf(self):
        return self.leaf

    def getBunch(self):
        return self.bunch

    def getRank(self):
        return self.rank

    def getAppearanceDay(self):
        return self.appearanceDay

    def getAppearanceMonth(self):
        return self.appearanceMonth

    def getAppearanceYear(self):
        return self.appearanceYear

    def getInternode(self):
        return self.internode

    def getTree(self):
        return self.tree

    def growth_demand(self, TEff):
        """Propagate the growth-demand computation (driven by TEff) to the
        leaf, the internode, and -- unless its statut is 'RECOLTE'
        (French, presumably 'harvested') -- the bunch."""
        #organs = self.ActiveOrgans()
        #for o in organs :
        #    o.growth_demand(TEff)

        self.leaf.growth_demand(TEff)
        self.internode.growth_demand(TEff)
        if self.bunch.statut != 'RECOLTE':
            self.bunch.growth_demand(TEff)

    # print self.tree.plus_vieille_date
    #print self.leaf.demand, self.demand

    def growth(self):
        """Run the growth step on each organ (bunch skipped once harvested)."""
        #organs = self.ActiveOrgans()
        #for o in organs :
        #    o.growth()
        self.leaf.growth()
        self.internode.growth()
        if self.bunch.statut != 'RECOLTE':
            self.bunch.growth()

    def ActiveOrgans(self):
        """Return the organs active for the current state.

        NOTE(review): for any state other than 'DEAD'/'INACTIVE'/'ACTIVE'
        this raises UnboundLocalError because organs_active is never set.
        """
        if self.state == 'DEAD':
            organs_active = [self.internode]
        if self.state == 'INACTIVE':
            organs_active = [self.leaf, self.bunch]
        if self.state == 'ACTIVE':
            organs_active = [self.leaf, self.bunch, self.internode]
        return (organs_active)

    def compute_facteur_age(self):
        """Update the bunch's age factor (linear in elapsed steps, capped at
        1) and derive its potential fruit number from it."""

        if (GlobalVariables.INCREASE_TAILLE_REGIMES *
            (self.tree.simulation.step + self.step_apparition -
             self.tree.date_plus_jeune_feuille) +
                GlobalVariables.FACTEUR_AGE_INI) > 1:
            self.bunch.facteur_age_regimes = 1
        else:
            self.bunch.facteur_age_regimes = (
                GlobalVariables.INCREASE_TAILLE_REGIMES *
                (self.tree.simulation.step + self.step_apparition -
                 self.tree.date_plus_jeune_feuille) +
                GlobalVariables.FACTEUR_AGE_INI)

        self.bunch.pot_fruits_number = self.bunch.facteur_age_regimes * GlobalVariables.MEAN_FRUIT_NUMBER_ADULTE