def test_float_greater_than_one_raises_backward():
    """`backward()` must reject float `n_features` values not on (0, 1)."""
    expected = "^If a float, `n_features` must be on"
    with pytest.raises(ValueError, match=expected):
        backward(n_features=1.5)
def test_min_features_requirement_backward():
    """Check the requirement that at least two features be present."""
    # A single-column design matrix should be rejected outright.
    with pytest.raises(IndexError, match="less than 2 features present."):
        backward(X_train=ones((501, 1)), X_val=ones((501, 1)))
def test_n_features_fails_on_string_backward():
    """`backward()` should raise a TypeError when `n_features` is a string."""
    pattern = "`n_features` must be of type int or float."
    with pytest.raises(TypeError, match=pattern):
        backward(min_change=None, n_features='-0.75')
def test_n_features():
    """Exercise invalid and valid values of `n_features`."""
    # Out-of-range values (too large int, negative float) must raise.
    for bad_value in (10000, -1.0):
        with pytest.raises(ValueError):  # should raise
            backward(n_features=bad_value)
    backward(n_features=0.5)  # this should not raise
def test_min_change_greater_than_zero_backward():
    """Check that `backward()` rejects a `min_change` that is not > 0."""
    expected = "`min_change` must be greater than zero."
    with pytest.raises(ValueError, match=expected):
        backward(n_features=None, min_change=-0.75)
def test_both_non_none_backward():
    """
    Check that `backward()` raises when *both* `min_change` and
    `n_features` are not None — at least one of them must be None.
    """
    # Note: items in backticks (``) will be in alphabetical order.
    msg = "At least one of `min_change` and `n_features` must be None."
    with pytest.raises(TypeError, match=msg):
        backward(n_features=0.5, min_change=0.3)
def test_selection_class_use_of_criterion():
    """Test Criterion through `forward()` and `backward()`."""
    msg = "`criterion` must be one of: None, 'aic', 'bic'."
    # Invalid criterion during forward selection.
    with pytest.raises(ValueError, match=msg):
        forward(min_change=0.5, criterion='acc')
    # Invalid criterion during backward selection.
    with pytest.raises(ValueError, match=msg):
        backward(n_features=0.5, criterion='Santa')
def test_too_few_features():
    """
    Check that there are enough features for selection to be a
    coherent goal (i.e., >= 2).
    """
    X_train = DEFAULT_SELECTION_PARAMS['X_train']
    X_train = X_train[:, 0:1]  # keep only a single feature column
    with pytest.raises(IndexError):
        forward(n_features=1, X_train=X_train)
    with pytest.raises(IndexError):
        backward(n_features=1, X_train=X_train)
def test_loop_exhaust():
    """Test exhausting `backward()`'s loop."""
    # Requesting every feature (with skipping disabled) forces the
    # selection loop to run to completion.
    backward(n_features=X_train.shape[-1], min_change=None, _do_not_skip=False)
def test_passing_significant_change():
    """
    Test cases where there is a significant `min_change` during
    backward selection.
    """
    backward(n_features=None, min_change=1, _last_score_punt=True)
def test_bsel_verbose_output():
    """Backward selection should still produce output when `verbose=True`."""
    selected = backward(n_features=2, min_change=None, verbose=True)
    assert len(selected) >= 1
def test_bsel_min_change_output():
    """Backward selection driven by `min_change` with the AIC criterion."""
    selected = backward(n_features=None, min_change=10, criterion='aic')
    assert len(selected) >= 1
def test_bsel_bic_output():
    """Backward selection with the BIC criterion yields at least one feature."""
    selected = backward(n_features=2, min_change=None, criterion='bic')
    assert len(selected) >= 1
# -----------------------------------------------------------------------------
# Outputs: Run the Forward and Backward Selection Algorithms
# -----------------------------------------------------------------------------

# Forward selection: defaults first, then the other parameter option.
forward_output = forward()
forward_output += forward(n_features=1, min_change=None)

# Force the backward selection algorithm to select the single feature
# it thinks is most predictive. If implemented correctly, `backward()`
# should be able to identify `true_best_features` as predictive.
backward_output = backward(n_features=1)
# Run using the other parameter option.
backward_output += backward(min_change=0.0001, n_features=None)

# -----------------------------------------------------------------------------
# Test outputs: Type
# -----------------------------------------------------------------------------


def output_type(output):
    """Assert that `output` (algorithm result) is a list."""
    assert isinstance(output, list), "Output from the algorithm was not a list."