Example #1
0
 def _anomaly_log_prob(self, targets, prediction_ops):
   """Return the log probability of `targets` under the anomaly distribution.

   The location comes from `prediction_ops["mean"]` and the spread
   parameter from `prediction_ops["anomaly_params"]` (variance for the
   Gaussian case, scale for the Cauchy case).
   """
   location = prediction_ops["mean"]
   anomaly_params = prediction_ops["anomaly_params"]
   if self._anomaly_distribution == AnomalyMixtureARModel.GAUSSIAN_ANOMALY:
     # Floor the variance before sqrt so sigma stays finite and nonzero.
     sigma = math_ops.sqrt(gen_math_ops.maximum(anomaly_params, 1e-5))
     return math_utils.normal_log_prob(targets, sigma, location)
   # Only two anomaly distributions are supported.
   assert self._anomaly_distribution == AnomalyMixtureARModel.CAUCHY_ANOMALY
   return math_utils.cauchy_log_prob(targets, anomaly_params, location)
Example #2
0
 def _anomaly_log_prob(self, targets, prediction_ops):
   # Log probability of `targets` under the configured anomaly distribution.
   # `prediction_ops["mean"]` supplies the location; "anomaly_params" the
   # spread (variance for Gaussian, scale for Cauchy). Shapes presumably
   # broadcast against `targets` -- TODO(review): confirm with callers.
   prediction = prediction_ops["mean"]
   if self._anomaly_distribution == AnomalyMixtureARModel.GAUSSIAN_ANOMALY:
     anomaly_variance = prediction_ops["anomaly_params"]
     # Floor the variance at 1e-5 so sqrt is well-defined and sigma nonzero.
     anomaly_sigma = math_ops.sqrt(
         gen_math_ops.maximum(anomaly_variance, 1e-5))
     log_prob = math_utils.normal_log_prob(targets, anomaly_sigma, prediction)
   else:
     # Only two anomaly distributions are supported.
     assert self._anomaly_distribution == AnomalyMixtureARModel.CAUCHY_ANOMALY
     anomaly_scale = prediction_ops["anomaly_params"]
     log_prob = math_utils.cauchy_log_prob(targets, anomaly_scale, prediction)
   return log_prob
Example #3
0
 def loss_op(self, targets, prediction_ops):
   """Create loss_op.

   Computes either the negative Gaussian log-likelihood or the squared
   error between `targets` and the predicted mean, averaged over all
   target elements.

   Args:
     targets: Tensor of target values.
     prediction_ops: Dict with a "mean" Tensor and, for the likelihood
       loss, a "covariance" Tensor.

   Returns:
     A scalar Tensor: the mean per-element loss.
   """
   prediction = prediction_ops["mean"]
   if self.loss == ARModel.NORMAL_LIKELIHOOD_LOSS:
     covariance = prediction_ops["covariance"]
     # Floor the covariance so sqrt is well-defined and sigma is nonzero.
     sigma = math_ops.sqrt(gen_math_ops.maximum(covariance, 1e-5))
     loss_op = -math_ops.reduce_sum(
         math_utils.normal_log_prob(targets, sigma, prediction))
   else:
     assert self.loss == ARModel.SQUARED_LOSS, self.loss
     # Use the fused squared_difference op, consistent with the other
     # loss_op implementation in this file, instead of
     # square(prediction - targets).
     loss_op = math_ops.reduce_sum(
         math_ops.squared_difference(prediction, targets))
   # Normalize by the total number of target elements.
   loss_op /= math_ops.cast(
       math_ops.reduce_prod(array_ops.shape(targets)), loss_op.dtype)
   return loss_op
Example #4
0
 def loss_op(self, targets, prediction_ops):
   """Build the training loss: negative log-likelihood or squared error."""
   mean = prediction_ops["mean"]
   if self.loss == ARModel.NORMAL_LIKELIHOOD_LOSS:
     # Clip the covariance before sqrt so the stddev stays strictly positive.
     stddev = math_ops.sqrt(
         gen_math_ops.maximum(prediction_ops["covariance"], 1e-5))
     log_likelihood = math_utils.normal_log_prob(targets, stddev, mean)
     total_loss = -math_ops.reduce_sum(log_likelihood)
   else:
     assert self.loss == ARModel.SQUARED_LOSS, self.loss
     total_loss = math_ops.reduce_sum(
         math_ops.squared_difference(mean, targets))
   # Average over every element of `targets`.
   element_count = math_ops.reduce_prod(array_ops.shape(targets))
   total_loss /= math_ops.cast(element_count, total_loss.dtype)
   return total_loss
Example #5
0
 def loss_op(self, targets, prediction_ops):
   """Mean negative log-likelihood under the normal/anomaly mixture."""
   mean = prediction_ops["mean"]
   # Normal-component log density, weighted by its prior probability.
   sigma = math_ops.sqrt(
       gen_math_ops.maximum(prediction_ops["covariance"], 1e-5))
   normal_log_prob = (
       math_utils.normal_log_prob(targets, sigma, mean)
       + math_ops.log(1 - self._anomaly_prior_probability))
   # Anomaly-component log density, weighted by its prior probability.
   anomaly_log_prob = (
       self._anomaly_log_prob(targets, prediction_ops)
       + math_ops.log(self._anomaly_prior_probability))
   # Stable log(exp(a) + exp(b)): factor out the larger exponent so the
   # remaining exp never overflows.
   smaller = gen_math_ops.minimum(normal_log_prob, anomaly_log_prob)
   larger = gen_math_ops.maximum(normal_log_prob, anomaly_log_prob)
   mixture_log_prob = larger + math_ops.log(
       1 + gen_math_ops.exp(smaller - larger))
   total = -math_ops.reduce_sum(mixture_log_prob)
   # Average over every element of `targets`.
   return total / math_ops.cast(
       math_ops.reduce_prod(array_ops.shape(targets)), self.dtype)
Example #6
0
 def loss_op(self, targets, prediction_ops):
   """Create loss_op.

   Mean negative log-likelihood of `targets` under a two-component
   mixture of the normal model and the anomaly distribution.
   """
   prediction = prediction_ops["mean"]
   covariance = prediction_ops["covariance"]
   # Normal data log probability.
   # Floor the covariance at 1e-5 so sqrt stays finite and sigma nonzero.
   sigma = math_ops.sqrt(gen_math_ops.maximum(covariance, 1e-5))
   log_prob1 = math_utils.normal_log_prob(targets, sigma, prediction)
   # Weight by the prior probability of the normal component.
   log_prob1 += math_ops.log(1 - self._anomaly_prior_probability)
   # Anomaly log probability, weighted by the anomaly prior.
   log_prob2 = self._anomaly_log_prob(targets, prediction_ops)
   log_prob2 += math_ops.log(self._anomaly_prior_probability)
   # We need to compute log(exp(log_prob1) + exp(log_prob2)). For numerical
   # stability, we rewrite the expression as below (factor out the larger
   # exponent so exp never overflows).
   p1 = gen_math_ops.minimum(log_prob1, log_prob2)
   p2 = gen_math_ops.maximum(log_prob1, log_prob2)
   mixed_log_prob = p2 + math_ops.log(1 + gen_math_ops.exp(p1 - p2))
   loss_op = -math_ops.reduce_sum(mixed_log_prob)
   # Normalize by the total number of target elements.
   loss_op /= math_ops.cast(
       math_ops.reduce_prod(array_ops.shape(targets)), self.dtype)
   return loss_op