Example 1
    def test_importance_is_monotonic_in_mean(self):
        f = pb.Feature(feature=10, value=5)
        importances = []
        for mean in np.linspace(0.0, 3.0, 10):
            # Sweep the weight's mean while holding its variance fixed.
            self._predictor._set_weight(f, pb.Gaussian(mean=mean, variance=0.5))
            importances.append(self._predictor._importance(f))
        self.assertEqual(sorted(importances), importances)
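_importance itself is not shown in these examples. One definition consistent with both importance tests is the KL divergence between a weight's posterior and the N(0, 1) prior; the sketch below only illustrates that assumption and is not the library's implementation.

import numpy as np

def kl_importance(mean, variance, prior_mean=0.0, prior_variance=1.0):
    """KL(N(mean, variance) || N(prior_mean, prior_variance)); zero for an
    untouched weight and monotonically increasing in |mean - prior_mean|."""
    return (np.log(np.sqrt(prior_variance / variance)) +
            (variance + (mean - prior_mean) ** 2) / (2.0 * prior_variance) -
            0.5)

assert kl_importance(0.0, 1.0) == 0.0   # a weight still at the prior
assert kl_importance(0.5, 0.5) > 0.0    # a weight that has been updated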
Example 2
def prior_bias_weight(prior_probability, beta, num_features):
    """The prior weight on the bias such that, on initialization of a model
    with the given parameters, P(y | x, initial_weights) = prior_probability.
    """
    # At initialization every non-bias weight has mean 0 and variance 1, so the
    # predictive probability is Phi(bias_mean / sqrt(beta**2 + num_features));
    # hence the square root in the scaling below.
    bias_mean = norm.ppf(prior_probability) * (beta ** 2 + num_features) ** 0.5
    return pb.Gaussian(mean=bias_mean, variance=1.0)
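A quick standalone check of the docstring's claim, with arbitrary parameter values (this check is not part of the library):

from scipy.stats import norm

prior_probability, beta, num_features = 0.1, 0.05, 20
bias_mean = norm.ppf(prior_probability) * (beta ** 2 + num_features) ** 0.5
# With all other weights at N(0, 1), the predictive probability is
# Phi(bias_mean / sqrt(beta**2 + num_features)), which recovers the prior.
assert abs(norm.cdf(bias_mean / (beta ** 2 + num_features) ** 0.5)
           - prior_probability) < 1e-12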
Example 3
    def _apply_dynamics(self, weight):
        """Blend the weight with the prior in precision space, weighted by
        epsilon, so that stale evidence gradually decays: epsilon=0 leaves the
        weight unchanged, epsilon=1 resets it to the prior."""
        prior = util.prior_weight()
        adjusted_variance = weight.variance * prior.variance / (
            (1.0 - self._config.epsilon) * prior.variance +
            self._config.epsilon * weight.variance)
        adjusted_mean = adjusted_variance * (
            (1.0 - self._config.epsilon) * weight.mean / weight.variance +
            self._config.epsilon * prior.mean / prior.variance)

        adjusted = pb.Gaussian(mean=adjusted_mean, variance=adjusted_variance)
        logger.debug("Adjusting weight %s to %s", util.pp(weight),
                     util.pp(adjusted))
        return adjusted
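A standalone sketch of the same dynamics step, with the prior fixed to N(0, 1) (matching util.prior_weight below) and epsilon passed explicitly, which makes the two limiting cases easy to check:

def apply_dynamics(mean, variance, epsilon, prior_mean=0.0, prior_variance=1.0):
    adjusted_variance = (variance * prior_variance /
                         ((1.0 - epsilon) * prior_variance +
                          epsilon * variance))
    adjusted_mean = adjusted_variance * (
        (1.0 - epsilon) * mean / variance +
        epsilon * prior_mean / prior_variance)
    return adjusted_mean, adjusted_variance

assert apply_dynamics(5.0, 0.5, epsilon=0.0) == (5.0, 0.5)  # no-op
assert apply_dynamics(5.0, 0.5, epsilon=1.0) == (0.0, 1.0)  # reset to the prior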
Example 4
    def train(self, features, label):
        logger.info("Training: %s, %s features", label, len(features))
        assert len(features) == self._config.num_features

        y = util.label_to_float(label)
        total_mean, total_variance = self._active_mean_variance(features)
        # Correction terms v and w scale the mean shift and the variance
        # shrinkage applied to each active weight below.
        v, w = util.gaussian_corrections(y * total_mean /
                                         np.sqrt(total_variance))

        for feature in features:
            # Each weight is updated in proportion to its share of the total
            # variance, then pushed through the dynamics step above.
            weight = self._get_weight(feature)
            mean_delta = y * weight.variance / np.sqrt(total_variance) * v
            variance_multiplier = 1.0 - weight.variance / total_variance * w
            updated = pb.Gaussian(mean=weight.mean + mean_delta,
                                  variance=weight.variance *
                                  variance_multiplier)

            self._set_weight(feature, self._apply_dynamics(updated))
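train relies on two helpers not shown in these examples, self._active_mean_variance and util.gaussian_corrections. The sketch below follows the standard Bayesian probit formulas this code appears to implement; the Gaussian namedtuple and the free-function signatures are stand-ins (in the snippet, _active_mean_variance takes features and looks the weights up itself), not the library's API.

from collections import namedtuple
from scipy.stats import norm

Gaussian = namedtuple("Gaussian", ["mean", "variance"])  # stand-in for pb.Gaussian

def gaussian_corrections(t):
    """v(t) = N(t; 0, 1) / Phi(t) and w(t) = v(t) * (v(t) + t), the mean and
    variance corrections of a Gaussian truncated at t. A real implementation
    would clamp very negative t to avoid dividing by a vanishing Phi(t)."""
    v = norm.pdf(t) / norm.cdf(t)
    return v, v * (v + t)

def active_mean_variance(weights, beta):
    """Sum of the active weights' means, and of their variances plus the
    beta**2 noise term."""
    total_mean = sum(w.mean for w in weights)
    total_variance = beta ** 2 + sum(w.variance for w in weights)
    return total_mean, total_variance

print(gaussian_corrections(0.0))                             # (0.7978..., 0.6366...)
print(active_mean_variance([Gaussian(0.0, 1.0)] * 3, 0.05))  # (0.0, 3.0025)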
Example 5
    def test_dynamic_is_null_for_epsilon_zero(self):
        p = self._create_predictor(epsilon=0.0)
        initial = pb.Gaussian(mean=5.0, variance=0.5)
        adjusted = p._apply_dynamics(initial)
        self.assertAlmostEqual(initial.mean, adjusted.mean)
        self.assertAlmostEqual(initial.variance, adjusted.variance)
Example 6
    def test_dynamics_shift_towards_prior(self):
        p = self._create_predictor(epsilon=0.05)
        initial = pb.Gaussian(mean=5.0, variance=0.5)
        adjusted = p._apply_dynamics(initial)
        self.assertGreater(initial.mean, adjusted.mean)
        self.assertLess(initial.variance, adjusted.variance)
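For the numbers in this test (initial N(5.0, 0.5), the N(0, 1) prior from util.prior_weight, epsilon = 0.05), the dynamics step works out to roughly:

    adjusted_variance = 0.5 * 1.0 / (0.95 * 1.0 + 0.05 * 0.5) = 0.5 / 0.975 ≈ 0.513  (> 0.5)
    adjusted_mean     = 0.513 * (0.95 * 5.0 / 0.5 + 0.05 * 0.0 / 1.0)       ≈ 4.87   (< 5.0)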
Example 7
    def test_importance_of_set_feature(self):
        f = pb.Feature(feature=10, value=5)
        self._predictor._set_weight(f, pb.Gaussian(mean=0.5, variance=0.5))
        self.assertGreater(self._predictor._importance(f), 0.0)
Example 8
def prior_weight():
    """The global prior on non-bias weights: a standard normal N(0, 1)."""
    return pb.Gaussian(mean=0.0, variance=1.0)