def predict_new(self, X, only_topk=None, csr_codes=None, beam_size=2, max_depth=None, cond_prob=True, normalized=False, threads=-1):
    """Run beam-search prediction down the hierarchical model chain.

    Parameters
    ----------
    X : feature matrix with X.shape[1] == self.nr_features (a bias column
        is appended when self.bias > 0).
    only_topk : labels kept per row at the final level; intermediate levels
        keep ``beam_size`` instead.
    csr_codes : optional sparse predictions used to seed the first level.
    beam_size : beam width at every non-final level.
    max_depth : how many levels of the chain to run; clipped to self.depth.
        Defaults to the full chain.
    cond_prob : True -> PostProcessor(Transform.get_lpsvm(3), Combiner.mul);
        False/None -> identity/noop PostProcessor; or pass a PostProcessor.
    normalized : if True, L1-normalize each row of the final prediction.
    threads : thread count forwarded to each level's predict_new.

    Returns
    -------
    Sparse prediction matrix produced by the last level executed.
    """
    if max_depth is None:
        max_depth = self.depth
    # Normalize the cond_prob argument into a PostProcessor instance.
    if cond_prob is None or cond_prob == False:
        cond_prob = PostProcessor(Transform.identity, Combiner.noop)
    if cond_prob == True:
        cond_prob = PostProcessor(Transform.get_lpsvm(3), Combiner.mul)
    # BUGFIX: was `tpye(cond_prob)` — a NameError whenever the assert failed.
    assert isinstance(cond_prob, PostProcessor), type(cond_prob)
    assert X.shape[1] == self.nr_features
    if self.bias > 0:
        X = smat_util.append_column(X, self.bias)
    pX = PyMatrix.init_from(X, dtype=self.model_chain[0].pW.dtype)
    max_depth = min(self.depth, max_depth)
    pred_csr = csr_codes
    for d in range(max_depth):
        cur_model = self.model_chain[d]
        # Intermediate levels keep beam_size candidates; the last keeps only_topk.
        local_only_topk = only_topk if d == (max_depth - 1) else beam_size
        pred_csr = cur_model.predict_new(
            pX,
            only_topk=local_only_topk,
            csr_codes=pred_csr,
            cond_prob=cond_prob,
            threads=threads,
        )
    if normalized:
        pred_csr = sk_normalize(pred_csr, axis=1, copy=False, norm='l1')
    return pred_csr
def __init__(self, X, Y, C=None, bias=-1.0, dtype=None):
    """Wrap a training problem (features X, labels Y, clustering C) as PyMatrix objects.

    Parameters
    ----------
    X : feature matrix; a constant bias column is appended when ``bias > 0``.
    Y : label matrix.
    C : optional label-clustering matrix; when given, Z = Y @ C is
        precomputed as a CSR matrix.
    bias : bias value appended as an extra feature column when positive.
    dtype : numeric dtype for the PyMatrix wrappers; defaults to X.dtype.
    """
    if dtype is None:
        dtype = X.dtype
    self.bias = bias
    if self.bias > 0:
        X = smat_util.append_column(X, self.bias)
    self.pX = PyMatrix.init_from(X, dtype)
    self.pY = PyMatrix.init_from(Y, dtype)
    self.pC = PyMatrix.init_from(C, dtype)
    # BUGFIX: was `self.Y.dot(self.C)` — neither self.Y nor self.C is
    # assigned in this constructor (only pY/pC are), so use the local
    # arguments, which hold the same data.
    Z = None if C is None else smat.csr_matrix(Y.dot(C))  # Z = Y * C
    self.pZ = PyMatrix.init_from(Z, dtype)
    self.dtype = dtype
def __init__(self, X, Y, C=None, bias=-1.0, dtype=None, Z_pred=None):
    """Wrap a training problem as PyMatrix objects, optionally merging predicted codes.

    Parameters
    ----------
    X : feature matrix; a constant bias column is appended when ``bias > 0``.
    Y : label matrix.
    C : optional label-clustering matrix; when given, Z = Y @ C is
        precomputed as a CSR matrix.
    bias : bias value appended as an extra feature column when positive.
    dtype : numeric dtype for the PyMatrix wrappers; defaults to X.dtype.
    Z_pred : optional predicted code matrix added onto Z (must match Z's
        shape — TODO confirm against callers) to augment ground-truth codes.
    """
    if dtype is None:
        dtype = X.dtype
    self.bias = bias
    if self.bias > 0:
        X = smat_util.append_column(X, self.bias)
    self.pX = PyMatrix.init_from(X, dtype)
    self.pY = PyMatrix.init_from(Y, dtype)
    self.pC = PyMatrix.init_from(C, dtype)
    # BUGFIX: was `self.Y.dot(self.C)` — neither self.Y nor self.C is
    # assigned in this constructor (only pY/pC are), so use the local
    # arguments, which hold the same data.
    Z = None if C is None else smat.csr_matrix(Y.dot(C))  # Z = Y * C
    if Z_pred is not None and Z is not None:
        print("Z", Z.shape)
        print("Z_pred", Z_pred.shape)
        # Union of ground-truth and predicted codes.
        Z = Z + Z_pred
        Z = Z.tocsr()
    self.pZ = PyMatrix.init_from(Z, dtype)
    self.dtype = dtype