def test_sparse_coder_common_transformer():
    """Run the common sklearn transformer checks against a SparseCoder.

    Builds a fixed-seed random dictionary so the checks are deterministic,
    then exercises the general transformer contract, the read-only-memmap
    variant, and the unfitted-transformer behavior.
    """
    n_components, n_features = 40, 3
    dictionary = np.random.RandomState(777).rand(n_components, n_features)
    coder = SparseCoder(dictionary)
    name = type(coder).__name__
    check_transformer_data_not_an_array(name, coder)
    check_transformer_general(name, coder)
    # Same general check, but with the input data backed by a read-only memmap.
    partial(check_transformer_general, readonly_memmap=True)(name, coder)
    check_transformers_unfitted(name, coder)
def test_sklearn_compliance(self):
    """Run sklearn's general transformer checks against every exported encoder.

    The estimator-check helpers changed signature in sklearn 0.19.0: before
    that release they expect estimator *classes*, from 0.19.0 on they expect
    *instances* — so pick accordingly.
    """
    import re  # local: only needed for the one-off version parse below

    # Bug fix: plain string comparison of version numbers is lexicographic and
    # misorders them (e.g. '0.9.0' > '0.19.0' as strings). Compare the leading
    # numeric components as an int tuple instead; non-numeric suffixes such as
    # 'dev0'/'rc1' are ignored by the digit-run extraction.
    numeric_parts = re.findall(r'\d+', sklearn.__version__)
    sklearn_version = tuple(int(part) for part in numeric_parts[:2])

    for encoder_name in encoders.__all__:
        with self.subTest(encoder_name=encoder_name):
            encoder = getattr(encoders, encoder_name)
            if sklearn_version >= (0, 19):
                # sklearn >= 0.19.0: the checks require an instance
                encoder = encoder()
            check_transformer_general(encoder_name, encoder)
            check_transformers_unfitted(encoder_name, encoder)
def test_sklearn_compliance(self):
    """Run sklearn's general transformer checks against every exported encoder.

    The estimator-check helpers changed signature in sklearn 0.19.0: before
    that release they expect estimator *classes*, from 0.19.0 on they expect
    *instances* — so pick accordingly.
    """
    import re  # local: only needed for the one-off version parse below

    # Bug fix: plain string comparison of version numbers is lexicographic and
    # misorders them (e.g. '0.9.0' > '0.19.0' as strings). Compare the leading
    # numeric components as an int tuple instead; non-numeric suffixes such as
    # 'dev0'/'rc1' are ignored by the digit-run extraction.
    numeric_parts = re.findall(r'\d+', sklearn.__version__)
    sklearn_version = tuple(int(part) for part in numeric_parts[:2])

    for encoder_name in encoders.__all__:
        with self.subTest(encoder_name=encoder_name):
            encoder = getattr(encoders, encoder_name)
            if sklearn_version >= (0, 19):
                # sklearn >= 0.19.0: the checks require an instance
                encoder = encoder()
            check_transformer_general(encoder_name, encoder)
            check_transformers_unfitted(encoder_name, encoder)
def test_general_transformers(self):
    """Run sklearn's general transformer check on an instance of each encoder."""
    encoder_cases = (
        ('hashing_encoder', HashingEncoder),
        ('backward_difference_encoder', BackwardDifferenceEncoder),
        ('binary_encoder', BinaryEncoder),
        ('helmert_encoder', HelmertEncoder),
        ('ordinal_encoder', OrdinalEncoder),
        ('polynomial_encoder', PolynomialEncoder),
        ('sum_coding', SumEncoder),
        ('one_hot', OneHotEncoder),
        ('basen', BaseNEncoder),
        ('leave_one_out', LeaveOneOutEncoder),
    )
    # Instantiate inside the loop so construction and checking interleave
    # exactly as in a sequence of one-off calls.
    for label, encoder_class in encoder_cases:
        check_transformer_general(label, encoder_class())
def test_general_transformers(self):
    """Run sklearn's general transformer check on each encoder class.

    Note: the check helpers are handed the classes themselves here, not
    instances (the pre-0.19.0 sklearn calling convention).
    """
    encoder_cases = (
        ('hashing_encoder', HashingEncoder),
        ('backward_difference_encoder', BackwardDifferenceEncoder),
        ('binary_encoder', BinaryEncoder),
        ('helmert_encoder', HelmertEncoder),
        ('ordinal_encoder', OrdinalEncoder),
        ('polynomial_encoder', PolynomialEncoder),
        ('sum_coding', SumEncoder),
        ('one_hot', OneHotEncoder),
        ('basen', BaseNEncoder),
    )
    for label, encoder_class in encoder_cases:
        check_transformer_general(label, encoder_class)