def test_negative_values_in_weights_error(self):
    """Check weighted_quantile raises ValueError for negative sample weights."""
    transformer = CappingTransformer(capping_values={"a": [2, 10]})

    with pytest.raises(ValueError, match="negative weights in sample weights"):
        transformer.weighted_quantile([2, 3, 4, 5], [0, 1], [2, -0.01])
def test_zero_total_weight_error(self):
    """Check weighted_quantile raises ValueError when the weights sum to 0."""
    transformer = CappingTransformer(capping_values={"a": [2, 10]})

    expected_message = "total sample weights are not greater than 0"
    with pytest.raises(ValueError, match=expected_message):
        transformer.weighted_quantile([2, 3, 4, 5], [0, 1], [0, 0])
def test_inf_values_in_weights_error(self):
    """Check weighted_quantile raises ValueError for +/-inf sample weights."""
    transformer = CappingTransformer(capping_values={"a": [2, 10]})

    # both positive and negative infinity must be rejected
    for bad_weights in ([2, np.inf], [1, -np.inf]):
        with pytest.raises(ValueError, match="infinite values in sample weights"):
            transformer.weighted_quantile([2, 3, 4, 5], [0, 1], bad_weights)
def test_expected_output(
    self, values, sample_weight, quantiles, expected_quantiles
):
    """Check weighted_quantile returns the expected quantile values."""
    transformer = CappingTransformer(capping_values={"a": [2, 10]})

    result = transformer.weighted_quantile(
        pd.Series(values), quantiles, sample_weight
    )

    # compare at 1dp so small floating point noise does not fail the test
    assert (
        np.round(result, 1).tolist() == expected_quantiles
    ), "unexpected weighted quantiles calculated"