def test(self, *args, **kwargs):
     ''' Usage:
         Y_label[, ap][, complexity][, depths]
             = test(X, Y=None, param=None,
                    return_complexity=False, return_depth=False,
                    verbose=0)

         Predicts 1-based labels for the samples in X by taking the
         argmax over the per-label scores from test_proba(). When
         ground-truth labels Y (1-based, aligned with X) are supplied,
         additionally computes the per-label average precision. Optionally
         also returns per-sample test complexity and leaf depth, as
         produced by test_proba().
     '''
     X, Y, param, return_complexity, return_depth, verbose \
         = parse_args(args,
                      (np.ndarray, np.ndarray, SoftLabelTree.Param,
                       bool,  bool,  int),
                      (None,       None,       None,
                       False, False, 0))
     # Keyword arguments override their positional counterparts.
     X                 = kwargs.get('X',                 X)
     Y                 = kwargs.get('Y',                 Y)
     param             = kwargs.get('param',             param)
     return_complexity = kwargs.get('return_complexity', return_complexity)
     return_depth      = kwargs.get('return_depth',      return_depth)
     verbose           = kwargs.get('verbose',           verbose)
     Y_predict  = None
     ap         = None
     complexity = None
     depths     = None
     # Only evaluate when X is given and matches the training dimension.
     if X is not None and X.shape[-1] == self.root.data.dimension:
         # BUGFIX: verbose was parsed above but never forwarded, so
         # test(..., verbose=1) produced no progress output.
         # NOTE(review): param may still be None here; test_proba's default
         # (self.tree_param) only applies if parse_args treats an explicit
         # None as "use default" — verify against parse_args.
         result = self.test_proba(X, param, return_complexity,
                                  return_depth, verbose)
         if return_complexity and return_depth:
             Y_proba, complexity, depths = result
         elif return_depth:
             Y_proba, depths = result
         elif return_complexity:
             Y_proba, complexity = result
         else:
             Y_proba = result
         # Scores are per-label columns; labels are 1-based.
         Y_predict = np.argmax(Y_proba, axis=-1) + 1
         if Y is not None and Y.shape[0] == X.shape[0]:
             # One-hot encode the 1-based ground-truth labels.
             Y_truth = np.zeros((X.shape[0], self.root.data.num_of_labels),
                                dtype=np.int8)
             Y_truth[xrange(Y_truth.shape[0]), Y - 1] = 1
             # average_precision_score cannot handle infinite scores, so
             # map +inf just above the finite maximum and -inf just below
             # the finite minimum. Each pair zeroes the infinities first so
             # they do not contaminate the max/min computation.
             posinfs = np.where(np.isposinf(Y_proba))
             Y_proba[posinfs] = 0
             Y_proba[posinfs] = np.max(Y_proba) + 1
             neginfs = np.where(np.isneginf(Y_proba))
             Y_proba[neginfs] = 0
             Y_proba[neginfs] = np.min(Y_proba) - 1
             # Average precision, one-vs-rest per label.
             ap = np.empty(self.root.data.num_of_labels)
             for i_label in xrange(self.root.data.num_of_labels):
                 ap[i_label] = average_precision_score(Y_truth[:, i_label],
                                                       Y_proba[:, i_label])
     return pack_tuple(Y_predict, ap, complexity, depths)
 def test_proba(self, *args, **kwargs):
     ''' Usage:
         Y_proba[, complexity][, depths]
             = test_proba(X, param=None,
                          return_complexity=False,
                          return_depth=False,
                          verbose=0)
     '''
     X, param, return_complexity, return_depth, verbose \
         = parse_args(args, (np.ndarray,
                             SoftLabelTree.Param,
                             bool,
                             bool,
                             int),
                            (None,
                             self.tree_param,
                             False,
                             False,
                             0))
     X                 = kwargs.get('X',                 X)
     param             = kwargs.get('param',             param)
     return_complexity = kwargs.get('return_complexity', return_complexity)
     return_depth      = kwargs.get('return_depth',      return_depth)
     verbose           = kwargs.get('verbose',           verbose)
     Y_proba = np.empty((X.shape[0], self.root.data.num_of_labels),
                        dtype=np.double)
     complexity   = None
     depths       = None
     Y_proba[...] = -np.inf
     if return_depth:      depths     = np.zeros(X.shape[0], dtype=np.int)
     if return_complexity: complexity = np.zeros(X.shape[0], dtype=np.int)
     for i in xrange(X.shape[0]):
         if verbose > 0:
             print '\rTesting sample {}/{} ...'.format(i+1, X.shape[0]),
         x = X[i]
         node = self.root
         while not (node.is_leaf() or self._should_be_leaf(node, param)):
             if return_complexity:
                 complexity[i] += node.model.n_nonzeros
             if node.model.test_one(x) > 0:
                 node = node.lchild
             else:
                 node = node.rchild
         if node.model is None or not param.ovr_in_leaves:
             num_distrib = node.data.num_of_samples_of_each_label. \
                 astype(np.double)
             proba = num_distrib / np.sum(num_distrib)
             Y_proba[i, node.data.labels-1] = proba
         else:
             proba = node.model.decision_function(x).ravel()
             num_model_classes = node.model.classes_.shape[0]
             if num_model_classes <= 2:
                 proba = np.r_[-proba, proba]
                 num_model_classes -= 1
             Y_proba[i, node.model.classes_-1] = proba
             if return_complexity:
                 complexity[i] += np.count_nonzero(node.model.coef_)
         if return_depth:
                 depths[i] = node.depth
     if verbose > 0:
         print '\rDone.'
     return pack_tuple(Y_proba, complexity, depths)