def _check_is_fitted(self):
    """Verify that the classifier has been trained.

    Raises
    ------
    NotFittedError
        If the classifier is not fitted.

    """
    # Both attributes are set during `fit`; either missing means untrained.
    required_attrs = ['classes', 'n_features']
    check_is_fitted(self, required_attrs)
def _check_is_fitted(self):
    """Verify that the preprocessor has been trained.

    Raises
    ------
    NotFittedError
        If the preprocessor is not fitted.

    """
    # `w` and `b` are both populated by `fit`.
    required_attrs = ['w', 'b']
    check_is_fitted(self, required_attrs)
def _check_is_fitted(self):
    """Verify that the linear classifier has been trained.

    Raises
    ------
    NotFittedError
        If the classifier is not fitted.

    """
    # Only `w` is verified: some classifiers never set `b`.
    weights_attr = 'w'
    check_is_fitted(self, weights_attr)
    # Delegate the generic classifier checks to the parent class.
    super(CClassifierLinear, self)._check_is_fitted()
def _check_is_fitted(self):
    """Verify that the Ridge classifier has been trained.

    Raises
    ------
    NotFittedError
        If the classifier is not fitted.

    """
    # A kernelized classifier must also have stored the training set.
    has_kernel = self._kernel is not None
    if has_kernel:
        check_is_fitted(self, '_tr')
    # Run the standard checks of the parent class as well.
    super(CClassifierRidge, self)._check_is_fitted()
def _check_is_fitted(self):
    """Verify that the SGD classifier has been trained.

    Raises
    ------
    NotFittedError
        If the classifier is not fitted.

    """
    # With a non-linear kernel the training data must have been stored.
    if not self.is_kernel_linear():
        check_is_fitted(self, '_tr')
    # Run the standard checks of the parent class as well.
    super(CClassifierSGD, self)._check_is_fitted()
def _check_is_fitted(self):
    """Verify that the SVM classifier has been trained.

    Raises
    ------
    NotFittedError
        If the classifier is not fitted.

    """
    linear = self.is_kernel_linear()

    # Dual variables must exist when the kernel is non-linear,
    # or when the user explicitly asked to store them.
    if not linear or self.store_dual_vars is True:
        # Checking the SVs is enough
        check_is_fitted(self, 'sv')

    # SVM is a special case: `_w` is not set for non-linear kernels,
    # so the superclass `_check_is_fitted` cannot be called here.
    if linear:
        check_is_fitted(self, 'w')

    # Finally, verify the generic CClassifier attributes directly.
    check_is_fitted(self, ['classes', 'n_features'])