def test_make_parameterized_classification():
    """Check the generated X carries the parameter values in its extra columns."""
    # Simple case: one fixed value per parameter.
    mu0 = theano.shared(0.)
    mu1 = theano.shared(1.)
    p0 = Normal(mu=mu0)
    p1 = Normal(mu=mu1)

    X, y = make_parameterized_classification(p0, p1, 100, [mu0, mu1])

    # One data feature plus one column per parameter.
    assert X.shape == (100, 1 + 2)
    assert_array_almost_equal(X[:, 1], np.zeros(100))
    assert_array_almost_equal(X[:, 2], np.ones(100))

    # Grid of parameter values: each parameter gets a list of candidates.
    X, y = make_parameterized_classification(
        p0, p1, 100,
        [(mu0, [0, 0.5]), (mu1, [0.5, 1.5])])

    assert X.shape == (100, 1 + 2)
    assert_array_equal(np.unique(X[:, 1]), [0, 0.5])
    assert_array_equal(np.unique(X[:, 2]), [0.5, 1.5])

    # Every combination from the 2x2 grid must appear in the sample.
    combos = {tuple(row) for row in X[:, 1:]}
    assert_array_equal(np.array(sorted(combos)),
                       [[0., 0.5], [0., 1.5], [0.5, 0.5], [0.5, 1.5]])
def test_parameterized_classifier():
    """A ParameterizedClassifier should separate two shifted Normals perfectly
    on its own training data."""
    mu0 = theano.shared(0)
    mu1 = theano.shared(1)
    p0 = Normal(mu=mu0)
    p1 = Normal(mu=mu1)
    X, y = make_parameterized_classification(p0, p1, 100, [mu0, mu1])

    clf = ParameterizedClassifier(DecisionTreeClassifier(),
                                  params=[mu0, mu1])
    clf.fit(X, y)

    # n_features_ counts only the data features, not the parameter columns.
    assert clf.n_features_ == 1
    # A decision tree memorizes its training set, so predictions match labels.
    assert_array_almost_equal(y, clf.predict(X))