def test(self, *args, **kwargs):
    '''
    Usage:
        Y_label[, ap][, complexity][, depths] = \
            test(X, Y=None, return_complexity=False,
                 return_depth=False, verbosity=0)
    '''
    X, Y, return_complexity, return_depth, verbosity \
        = parse_args(args,
                     (np.ndarray, np.ndarray, bool, bool, int),
                     (None, None, False, False, 0))
    X = kwargs.get('X', X)
    Y = kwargs.get('Y', Y)
    return_complexity = kwargs.get('return_complexity', return_complexity)
    return_depth = kwargs.get('return_depth', return_depth)
    verbosity = kwargs.get('verbosity', verbosity)

    Y_predict = None
    ap = None
    complexity = None
    depths = None
    if X is not None:
        result = self.test_proba(X, return_complexity, return_depth,
                                 verbosity)
        if return_complexity and return_depth:
            Y_proba, complexity, depths = result
        elif not return_complexity and return_depth:
            Y_proba, depths = result
        elif return_complexity and not return_depth:
            Y_proba, complexity = result
        else:
            Y_proba = result
        # Labels are 1-based, hence the +1 after argmax.
        Y_predict = np.argmax(Y_proba, axis=-1) + 1
        if Y is not None and Y.shape[0] == X.shape[0]:
            ap = average_precision(Y, Y_proba)
    return pack_seq(Y_predict, ap, complexity, depths)
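# Usage sketch for the forest-level test() above (hypothetical setup:
# `forest` is assumed to be a trained instance exposing this method, X is
# (n_samples, n_features), and Y holds 1-based integer labels). The shape
# of the returned tuple follows the boolean flags.
def _example_forest_test(forest, X, Y):
    Y_label = forest.test(X)                 # predictions only
    Y_label, ap = forest.test(X, Y)          # plus average precision
    Y_label, ap, complexity, depths = forest.test(
        X, Y, return_complexity=True, return_depth=True, verbosity=1)
    return Y_label, ap, complexity, depths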
def set_up(self, *args, **kwargs):
    args = unpack_tuple(args)
    self.gd_param, self.sgd_param, self.sd_param, \
        self.model_param, self.ovr_param, self.tree_param \
        = parse_args(args,
                     (SoftDecisionSolver.GDParam,
                      SoftDecisionSolver.SGDParam,
                      SoftDecisionSolver.SDParam,
                      SoftDecisionModel.Param,
                      dict,
                      SoftLabelTree.Param),
                     (self.gd_param, self.sgd_param, self.sd_param,
                      self.model_param, self.ovr_param, self.tree_param))
    self.__dict__.update(kwargs)
    # Only (re)configure the solver once all three of its parameter
    # groups are available.
    if self.gd_param is not None and \
            self.sgd_param is not None and \
            self.sd_param is not None:
        self.solver.set_up(self.gd_param, self.sgd_param, self.sd_param)
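# parse_args is used by every method in this section but is not defined
# here. Its presumed contract: scan the declared slots left to right and
# consume each positional argument at the first slot whose type it
# satisfies, leaving unmatched slots at their defaults. A minimal sketch
# under that assumption (hypothetical reconstruction, not the original
# helper):
def parse_args(args, types, defaults):
    values = list(defaults)
    i = 0
    for arg in args:
        # Advance to the first remaining slot this argument can fill.
        while i < len(types) and not isinstance(arg, types[i]):
            i += 1
        if i == len(types):
            break
        values[i] = arg
        i += 1
    return tuple(values)
# Under this reading, positional arguments may be omitted freely but must
# respect the declared slot order; keyword arguments bypass the matching
# entirely via kwargs.get / vars(self).update.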
def test_proba(self, *args, **kwargs):
    '''
    Usage:
        Y_proba[, complexity][, depths] = \
            test_proba(X, return_complexity=False, return_depth=False,
                       verbosity=0)
    '''
    X, return_complexity, return_depth, verbosity \
        = parse_args(args,
                     (np.ndarray, bool, bool, int),
                     (None, False, False, 0))
    X = kwargs.get('X', X)
    return_complexity = kwargs.get('return_complexity', return_complexity)
    return_depth = kwargs.get('return_depth', return_depth)
    verbosity = kwargs.get('verbosity', verbosity)

    # Fan the trees out over joblib workers when parallelism is requested
    # and there is more than one tree to test.
    num_of_parallel_jobs = self.forest_param.num_of_parallel_jobs
    if num_of_parallel_jobs > 1 and len(self) > 1:
        if num_of_parallel_jobs > len(self):
            num_of_parallel_jobs = len(self)
        test_result = Parallel(n_jobs=num_of_parallel_jobs,
                               verbose=verbosity)(
            delayed(_parallel_test)(sltree, X, return_complexity,
                                    return_depth)
            for sltree in self)
    else:
        test_result = [None] * len(self)
        for i, sltree in enumerate(self):
            if verbosity > 0:
                print '\rTesting SoftLabelTree {}/{}.' \
                    .format(i + 1, len(self)),
            test_result[i] = _parallel_test(sltree, X, return_complexity,
                                            return_depth)

    complexity_all_trees = None
    depths_all_trees = None
    if return_complexity or return_depth:
        unzipped_result = zip(*test_result)
        if return_complexity and not return_depth:
            Y_proba_all_trees, complexity_all_trees = unzipped_result
        elif not return_complexity and return_depth:
            Y_proba_all_trees, depths_all_trees = unzipped_result
        else:
            Y_proba_all_trees, complexity_all_trees, depths_all_trees = \
                unzipped_result
    else:
        Y_proba_all_trees = test_result

    # Sum the per-tree scores; the argmax of the sum gives the forest
    # vote, so no normalization is needed.
    Y_proba = np.zeros(Y_proba_all_trees[0].shape)
    complexity = None
    depths = None
    for Y_proba_each_tree in Y_proba_all_trees:
        Y_proba += Y_proba_each_tree
    if complexity_all_trees is not None:
        complexity = np.vstack(complexity_all_trees).T
    if depths_all_trees is not None:
        depths = np.vstack(depths_all_trees).T
    return pack_seq(Y_proba, complexity, depths)
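# _parallel_test is referenced above but not defined in this section.
# joblib.Parallel needs a picklable, module-level callable, so it is
# presumably a thin forwarding wrapper; a sketch under that assumption:
def _parallel_test(sltree, X, return_complexity, return_depth):
    # Hypothetical reconstruction: forward to the tree-level predictor
    # (defined further below) so each tree can be tested in a worker.
    return sltree.test_proba(X, return_complexity=return_complexity,
                             return_depth=return_depth)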
def test(self, *args, **kwargs):
    '''
    Usage:
        Y_label[, ap][, complexity][, depths] = \
            test(X, Y=None, param=None, return_complexity=False,
                 return_depth=False, verbose=0)
    '''
    X, Y, param, return_complexity, return_depth, verbose \
        = parse_args(args,
                     (np.ndarray, np.ndarray, SoftLabelTree.Param,
                      bool, bool, int),
                     (None, None, None, False, False, 0))
    X = kwargs.get('X', X)
    Y = kwargs.get('Y', Y)
    param = kwargs.get('param', param)
    return_complexity = kwargs.get('return_complexity', return_complexity)
    return_depth = kwargs.get('return_depth', return_depth)
    verbose = kwargs.get('verbose', verbose)

    Y_predict = None
    ap = None
    complexity = None
    depths = None
    if X is not None and X.shape[-1] == self.root.data.dimension:
        result = self.test_proba(X, param, return_complexity,
                                 return_depth)
        if return_complexity and return_depth:
            Y_proba, complexity, depths = result
        elif not return_complexity and return_depth:
            Y_proba, depths = result
        elif return_complexity and not return_depth:
            Y_proba, complexity = result
        else:
            Y_proba = result
        Y_predict = np.argmax(Y_proba, axis=-1) + 1
        if Y is not None and Y.shape[0] == X.shape[0]:
            # One-hot encode the ground truth (labels are 1-based).
            Y_truth = np.zeros((X.shape[0],
                                self.root.data.num_of_labels),
                               dtype=np.int8)
            Y_truth[xrange(Y_truth.shape[0]), Y - 1] = 1
            # average_precision_score cannot rank +/-inf scores, so clamp
            # them just beyond the finite extremes (zero them first so
            # they do not distort the max/min).
            posinfs = np.where(np.isposinf(Y_proba))
            Y_proba[posinfs] = 0
            Y_proba[posinfs] = np.max(Y_proba) + 1
            neginfs = np.where(np.isneginf(Y_proba))
            Y_proba[neginfs] = 0
            Y_proba[neginfs] = np.min(Y_proba) - 1
            # One-vs-rest average precision per label.
            ap = np.empty(self.root.data.num_of_labels)
            for i_label in xrange(self.root.data.num_of_labels):
                ap[i_label] = average_precision_score(Y_truth[:, i_label],
                                                      Y_proba[:, i_label])
    return pack_tuple(Y_predict, ap, complexity, depths)
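# The AP computation above is scikit-learn's one-vs-rest average
# precision applied per label to a one-hot encoding of the ground truth.
# A self-contained illustration of the same pattern on synthetic data
# (all names below are local to the example):
import numpy as np
from sklearn.metrics import average_precision_score

def _example_per_label_ap(num_labels=4, num_samples=100):
    Y = np.random.randint(1, num_labels + 1, size=num_samples)  # 1-based
    Y_proba = np.random.rand(num_samples, num_labels)  # per-label scores
    # One-hot encode the 1-based ground-truth labels.
    Y_truth = np.zeros((num_samples, num_labels), dtype=np.int8)
    Y_truth[np.arange(num_samples), Y - 1] = 1
    ap = np.empty(num_labels)
    for i_label in xrange(num_labels):
        ap[i_label] = average_precision_score(Y_truth[:, i_label],
                                              Y_proba[:, i_label])
    return ap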
def set_up(self, *args, **kwargs):
    self.gd_param, \
        self.sgd_param, \
        self.sd_param, \
        self.model_param, \
        self.ovr_param, \
        self.tree_param, \
        self.copy_from = parse_args(args,
                                    (GDParam, SGDParam, SDParam,
                                     ModelParam, dict, TreeParam,
                                     SoftLabelTree),
                                    (self.gd_param, self.sgd_param,
                                     self.sd_param, self.model_param,
                                     self.ovr_param, self.tree_param,
                                     self.copy_from))
    vars(self).update(kwargs)
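# Usage sketch for the tree-level set_up above (hypothetical: the
# param-class constructors and a default-constructible SoftLabelTree are
# assumed):
def _example_tree_set_up():
    tree = SoftLabelTree()
    # Positional arguments are matched by type, so any subset may be
    # supplied as long as the declared slot order is respected.
    tree.set_up(GDParam(), TreeParam())
    # Keyword arguments skip type matching and update attributes
    # directly via vars(self).update.
    tree.set_up(ovr_param={'C': 1.0})
    return tree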
def set_up(self, *args, **kwargs):
    self.gd_param, \
        self.sgd_param, \
        self.sd_param, \
        self.model_param, \
        self.ovr_param, \
        self.tree_param, \
        self.forest_param = parse_args(args,
                                       (GDParam, SGDParam, SDParam,
                                        ModelParam, dict, TreeParam,
                                        ForestParam),
                                       (self.gd_param, self.sgd_param,
                                        self.sd_param, self.model_param,
                                        self.ovr_param, self.tree_param,
                                        self.forest_param))
    vars(self).update(kwargs)
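# The forest-level set_up mirrors the tree-level one, with a ForestParam
# slot whose num_of_parallel_jobs attribute drives the joblib fan-out in
# the forest test_proba above. A usage sketch (hypothetical: the
# constructors are assumed beyond the attributes read in this section):
def _example_forest_set_up(forest):
    forest_param = ForestParam()
    forest_param.num_of_parallel_jobs = 4  # test trees on 4 workers
    forest.set_up(forest_param)
    # Equivalent keyword form, bypassing type matching:
    forest.set_up(forest_param=forest_param)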
def test_proba(self, *args, **kwargs):
    '''
    Usage:
        Y_proba[, complexity][, depths] = \
            test_proba(X, param=None, return_complexity=False,
                       return_depth=False, verbose=0)
    '''
    X, param, return_complexity, return_depth, verbose \
        = parse_args(args,
                     (np.ndarray, SoftLabelTree.Param, bool, bool, int),
                     (None, self.tree_param, False, False, 0))
    X = kwargs.get('X', X)
    param = kwargs.get('param', param)
    return_complexity = kwargs.get('return_complexity', return_complexity)
    return_depth = kwargs.get('return_depth', return_depth)
    verbose = kwargs.get('verbose', verbose)

    # Scores default to -inf so labels never reached at the leaf rank
    # below every label that receives a score.
    Y_proba = np.empty((X.shape[0], self.root.data.num_of_labels),
                       dtype=np.double)
    complexity = None
    depths = None
    Y_proba[...] = -np.inf
    if return_depth:
        depths = np.zeros(X.shape[0], dtype=np.int)
    if return_complexity:
        complexity = np.zeros(X.shape[0], dtype=np.int)
    for i in xrange(X.shape[0]):
        if verbose > 0:
            print '\rTesting sample {}/{} ...'.format(i + 1, X.shape[0]),
        x = X[i]
        # Route the sample down the tree: a positive decision goes left,
        # a negative one goes right.
        node = self.root
        while not (node.is_leaf() or self._should_be_leaf(node, param)):
            if return_complexity:
                complexity[i] += node.model.n_nonzeros
            if node.model.test_one(x) > 0:
                node = node.lchild
            else:
                node = node.rchild
        if node.model is None or not param.ovr_in_leaves:
            # Empirical label distribution at the leaf.
            num_distrib = node.data.num_of_samples_of_each_label \
                .astype(np.double)
            proba = num_distrib / np.sum(num_distrib)
            Y_proba[i, node.data.labels - 1] = proba
        else:
            # One-vs-rest model at the leaf; for binary models,
            # decision_function returns a single score, which is
            # expanded into one score per class.
            proba = node.model.decision_function(x).ravel()
            num_model_classes = node.model.classes_.shape[0]
            if num_model_classes <= 2:
                proba = np.r_[-proba, proba]
            Y_proba[i, node.model.classes_ - 1] = proba
            if return_complexity:
                complexity[i] += np.count_nonzero(node.model.coef_)
        if return_depth:
            depths[i] = node.depth
    if verbose > 0:
        print '\rDone.'
    return pack_tuple(Y_proba, complexity, depths)
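# One subtlety in the OvR leaf branch above: scikit-learn's
# decision_function returns a single score per sample for binary
# classifiers, so it must be expanded into one score per class before
# scattering into Y_proba. A self-contained illustration:
import numpy as np
from sklearn.svm import LinearSVC

def _example_binary_expansion():
    X = np.random.randn(40, 5)
    y = np.random.randint(1, 3, size=40)  # two classes: 1 and 2
    clf = LinearSVC().fit(X, y)
    proba = clf.decision_function(X[:1]).ravel()  # shape (1,): one score
    proba = np.r_[-proba, proba]  # shape (2,): one score per class
    # proba[k] now scores clf.classes_[k], matching the scatter
    # Y_proba[i, node.model.classes_ - 1] = proba in the method above.
    return proba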