    # These methods assume module-level imports of heapq and numpy (as np).
    def predict_smart(self, x_test):
        """Best-first search over the label tree, with greedy look-ahead completions."""
        y = []
        number_of_test_examples = len(x_test)
        for i in range(number_of_test_examples):
            search_tree = SearchTree(self.number_of_labels)
            root = search_tree.add_vertex("root_", 0, 1.0, 1.0, [], x_test[i])
            # Min-heap keyed by the inverted conditional probability, so the most
            # promising partial label assignment is expanded first.
            h = []
            heapq.heappush(h, (root.get_inversed_cond_prob(), root))
            best_leaf = None
            best_score = 0.0

            while len(h) != 0:
                vertex = heapq.heappop(h)[1]
                if vertex.get_conditional_prob() < best_score:
                    # Nothing left on the heap can beat the best complete assignment.
                    break
                greedy_leaf = self.look_ahead(search_tree, vertex, h, best_score)
                if greedy_leaf is None:
                    continue
                best_leaf = greedy_leaf
                best_score = greedy_leaf.get_conditional_prob()

            y.append(best_leaf.get_labels())

        return np.array(y)
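
The method above relies on a SearchTree/vertex API (add_vertex, get_inversed_cond_prob, get_conditional_prob, get_labels) that is not shown on this page. Below is a minimal, hypothetical sketch of what that interface might look like; the names and semantics are inferred from the call sites, not taken from the original source. Note the __lt__ tie-breaker, which heapq needs when two entries share the same priority.

import heapq


class Vertex:
    # Hypothetical stub: field meanings are guesses based on how predict_smart
    # and predict_ucs use the accessors.
    def __init__(self, name, depth, probability, conditional_prob, labels, x):
        self.name = name
        self.depth = depth
        self.probability = probability            # probability of this partial path
        self.conditional_prob = conditional_prob  # joint probability of the labels fixed so far
        self.labels = labels
        self.x = x

    def get_conditional_prob(self):
        return self.conditional_prob

    def get_inversed_cond_prob(self):
        # heapq is a min-heap, so pushing 1 - p makes the most probable vertex pop first.
        return 1.0 - self.conditional_prob

    def get_probability(self):
        return self.probability

    def get_labels(self):
        return self.labels

    def __lt__(self, other):
        # Tie-breaker so heapq can compare entries with equal priorities.
        return self.conditional_prob > other.conditional_prob


class SearchTree:
    # Hypothetical stub of the container used by the prediction methods.
    def __init__(self, number_of_labels):
        self.number_of_labels = number_of_labels
        self.vertices = []

    def add_vertex(self, name, depth, probability, conditional_prob, labels, x):
        v = Vertex(name, depth, probability, conditional_prob, labels, x)
        self.vertices.append(v)
        return v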
    def predict_full(self, x_test):
        """Exhaustive search: expand the full label tree and return its best leaf."""
        y = []
        number_of_test_examples = len(x_test)
        for i in range(number_of_test_examples):
            search_tree = SearchTree(self.number_of_labels)
            root = search_tree.add_vertex("root_", 0, 1.0, 1.0, [], x_test[i])
            # Build the complete tree (exponential in the number of labels) and
            # keep the leaf with the highest joint probability.
            self.generate_search_tree(search_tree, root)
            best_leaf = search_tree.find_best_leaf()
            y.append(best_leaf.get_labels())

        return np.array(y)
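
generate_search_tree and find_best_leaf are also external to this snippet. The sketch below is a rough, hypothetical guess at what they could look like under the stub interface above (the first would live on the classifier, the second on SearchTree); it makes explicit that predict_full pays for exactness with an exhaustive expansion.

# Hypothetical sketches, reusing the Vertex/SearchTree stubs above.
def generate_search_tree(self, search_tree, vertex):
    # Classifier method: recursively expand until every label is assigned.
    if len(vertex.get_labels()) == self.number_of_labels:
        return
    for child in self.generate_children(search_tree, vertex):
        self.generate_search_tree(search_tree, child)


def find_best_leaf(self):
    # SearchTree method: among fully labelled vertices, return the one with
    # the highest joint probability.
    leaves = [v for v in self.vertices
              if len(v.get_labels()) == self.number_of_labels]
    return max(leaves, key=lambda v: v.get_conditional_prob())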
    def predict_ucs(self, x_test, epsilon):
        """Uniform-cost search with epsilon pruning and a greedy fallback."""
        y = []
        number_of_test_examples = len(x_test)
        for i in range(number_of_test_examples):
            search_tree = SearchTree(self.number_of_labels)
            root = search_tree.add_vertex("root_", 0, 1.0, 1.0, [], x_test[i])
            q = []         # frontier, ordered by inverted conditional probability
            greedy_q = []  # vertices whose children were all pruned by epsilon
            heapq.heappush(q, (root.get_inversed_cond_prob(), root))
            best_leaf = None
            while len(q) != 0:
                vertex = heapq.heappop(q)[1]
                if len(vertex.get_labels()) == self.number_of_labels:
                    # The first complete assignment popped from the queue is the
                    # best one among the non-pruned paths.
                    best_leaf = vertex
                    break
                children = self.generate_children(search_tree, vertex)
                no_children_inserted = True
                for child in children:
                    # Prune children whose path probability does not exceed epsilon.
                    if child.get_probability() > epsilon:
                        heapq.heappush(q, (child.get_inversed_cond_prob(), child))
                        no_children_inserted = False
                if no_children_inserted:
                    heapq.heappush(greedy_q, (vertex.get_inversed_cond_prob(), vertex))

            if best_leaf is None:
                # Every path was pruned before reaching a leaf: complete the most
                # promising pruned vertices greedily and keep the best result.
                best_score = 0.0
                while len(greedy_q) != 0:
                    vertex = heapq.heappop(greedy_q)[1]
                    greedy_leaf = self.look_ahead(search_tree, vertex, [], best_score)
                    if greedy_leaf is None:
                        continue
                    best_leaf = greedy_leaf
                    best_score = greedy_leaf.get_conditional_prob()

            y.append(best_leaf.get_labels())

        return np.array(y)
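
Both predict_smart and the fallback branch of predict_ucs delegate to look_ahead, which is not included here either. The sketch below is only a guess at its contract, inferred from the call sites: greedily follow the locally most probable child down to a leaf and return that leaf only if it beats best_score, returning None otherwise; the real implementation may also push unexplored siblings onto the queue it receives. Larger epsilon values in predict_ucs prune more children, so this greedy fallback runs more often.

# Hypothetical sketch of look_ahead, inferred from how it is called above;
# not the original implementation.
def look_ahead(self, search_tree, vertex, h, best_score):
    current = vertex
    while len(current.get_labels()) < self.number_of_labels:
        children = self.generate_children(search_tree, current)
        if not children:
            return None
        # Follow the locally most probable branch.
        current = max(children, key=lambda c: c.get_conditional_prob())
    # Report the leaf only if it improves on the best complete assignment so far.
    if current.get_conditional_prob() > best_score:
        return current
    return None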