# Imports used by this function (TF 1.x contrib paths, assumed from the
# surrounding timeseries module):
from tensorflow.contrib import distributions
from tensorflow.contrib.timeseries.python.timeseries import math_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops


def cauchy_alternative_to_gaussian(current_times, current_values, outputs):
  """A Cauchy anomaly distribution, centered at a Gaussian prediction.

  Performs an entropy-matching approximation of the scale parameters of
  independent Cauchy distributions given the covariance matrix of a
  multivariate Gaussian in outputs["covariance"], and centers the Cauchy
  distributions at outputs["mean"]. This requires that the model we are
  creating an alternative/anomaly distribution for produces a mean and
  covariance.

  Args:
    current_times: A [batch size] Tensor of times, unused.
    current_values: A [batch size x num features] Tensor of values to
        evaluate the anomaly distribution at.
    outputs: A dictionary of Tensors with keys "mean" and "covariance"
        describing the Gaussian to construct an anomaly distribution from. The
        value corresponding to "mean" has shape [batch size x num features],
        and the value corresponding to "covariance" has shape
        [batch size x num features x num features].

  Returns:
    A [batch size] Tensor of log likelihoods; the anomaly log PDF evaluated at
    `current_values`.
  """
  del current_times  # unused
  cauchy_scale = math_utils.entropy_matched_cauchy_scale(outputs["covariance"])
  individual_log_pdfs = distributions.StudentT(
      df=array_ops.ones([], dtype=current_values.dtype),
      loc=outputs["mean"],
      scale=cauchy_scale).log_prob(current_values)
  return math_ops.reduce_sum(individual_log_pdfs, axis=1)
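# A minimal NumPy sketch of the entropy-matching idea behind
# `entropy_matched_cauchy_scale` (assumed behavior, not the library code):
# the differential entropy of a Gaussian with variance sigma^2 is
# 0.5*log(2*pi*e*sigma^2) and that of a Cauchy with scale gamma is
# log(4*pi*gamma), so matching entropies per feature gives
# gamma = sigma * sqrt(2*pi*e) / (4*pi).
import numpy as np

def entropy_matched_cauchy_scale_sketch(covariance):
    # Per-feature standard deviations from the diagonal of the covariance.
    sigma = np.sqrt(np.diagonal(covariance, axis1=-2, axis2=-1))
    return sigma * np.sqrt(2. * np.pi * np.e) / (4. * np.pi)

# Example: a batch of one 2x2 covariance matrix.
print(entropy_matched_cauchy_scale_sketch(np.array([[[4.0, 0.3],
                                                      [0.3, 1.0]]])))
# -> roughly [[0.658, 0.329]], i.e. ~0.329 * sigma per feature.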
def __init__(self, tf, valid_data_len, train_and_valid_data, hyper_param_dim):
    # Split the combined array into validation and training slices along the
    # second axis.
    self._valid_data = train_and_valid_data[:, :valid_data_len]
    self._train_data = train_and_valid_data[:, valid_data_len:]
    # Hyperparameters fed in at run time.
    self._l2_reg_strength = tf.placeholder(tf.float32, shape=[hyper_param_dim])
    self._model_lr = tf.placeholder(tf.float32)
    self._cross_entropy = None
    # Student's t model distribution, parameterized by a StudentTParams
    # container (degrees of freedom, location, and scale).
    self._params = StudentTParams(tf)
    self._model_distribution = dist.StudentT(
        df=self._params.shape, loc=self._params.y, scale=self._params.scale)
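# The constructor above depends on a `StudentTParams` container that is not
# shown. A hypothetical sketch of such a container, inferred only from the
# attribute names used above (`shape` for the degrees of freedom, `y` for the
# location, `scale` for the scale); everything else is assumed:
class StudentTParams(object):
    def __init__(self, tf):
        # softplus keeps the degrees of freedom and scale positive.
        self.shape = tf.nn.softplus(tf.Variable(1.0))  # degrees of freedom
        self.y = tf.Variable(0.0)                      # location
        self.scale = tf.nn.softplus(tf.Variable(1.0))  # scale

# Usage mirrors the constructor above: params = StudentTParams(tf), where
# `tf` is the imported tensorflow module.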
def _anomaly_log_prob(self, targets, prediction_ops):
  prediction = prediction_ops["mean"]
  if self._anomaly_distribution == AnomalyMixtureARModel.GAUSSIAN_ANOMALY:
    # Gaussian anomaly distribution: "anomaly_params" holds a variance.
    anomaly_variance = prediction_ops["anomaly_params"]
    anomaly_sigma = math_ops.sqrt(
        gen_math_ops.maximum(anomaly_variance, 1e-5))
    normal = distributions.Normal(loc=targets, scale=anomaly_sigma)
    log_prob = normal.log_prob(prediction)
  else:
    assert self._anomaly_distribution == AnomalyMixtureARModel.CAUCHY_ANOMALY
    # Cauchy anomaly distribution: a Student's t with df=1 is a Cauchy, and
    # "anomaly_params" holds its scale.
    anomaly_scale = prediction_ops["anomaly_params"]
    cauchy = distributions.StudentT(
        df=array_ops.ones([], dtype=anomaly_scale.dtype),
        loc=targets,
        scale=anomaly_scale)
    log_prob = cauchy.log_prob(prediction)
  return log_prob
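# Both snippets above use a Student's t with df=1 as a stand-in for a Cauchy
# distribution. A small SciPy check of that identity (a sketch, independent of
# the TensorFlow code above): the Student's t log-density with one degree of
# freedom matches the Cauchy log-density
# log(1 / (pi * gamma * (1 + ((x - mu) / gamma)**2))).
import numpy as np
from scipy import stats

x, mu, gamma = 2.5, 1.0, 0.7
lp_student = stats.t.logpdf(x, df=1, loc=mu, scale=gamma)
lp_cauchy = stats.cauchy.logpdf(x, loc=mu, scale=gamma)
lp_closed_form = -np.log(np.pi * gamma * (1. + ((x - mu) / gamma) ** 2))
print(np.allclose([lp_student, lp_cauchy], lp_closed_form))  # True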
import tensorflow as tf  # used below; missing from the original snippet
from tensorflow.contrib import distributions as tfcd

g = tf.Graph()

"""
The idea comes from Lemma 1 of the paper "Some Characterizations of the
Multivariate t Distribution":
https://ac.els-cdn.com/0047259X72900218/1-s2.0-0047259X72900218-main.pdf?_tid=9035319c-e165-4b9a-8851-ec6837fcfe0e&acdnat=1527542834_a6193c0ddc8296cfcf2bc38c73536890
"""

with g.as_default():
    # Create a vectorized t-distribution.
    studentT = tfcd.StudentT(
        df=2.1,
        loc=[[0.0, 0.0]],
        scale=[[1.0, 1.0]])

    # Scale matrix for the multivariate t-distribution.
    scale_matrix = [[2.0, 0.5],
                    [0.5, 2.0]]

    # Compute the Cholesky decomposition of the scale matrix.
    tril = tf.cholesky([scale_matrix])[0]

    # In this name scope we create an affine transform of the vectorized
    # t-distribution, which results in a 2-D t-distribution with the
    # prescribed scale matrix.
    with tf.name_scope("multi_studentT"):
        # Create the multivariate t-distribution via an affine transform with
        # a lower-triangular matrix. (The original snippet ends at this
        # comment; the construction below is an assumed completion.)
        multi_studentT = tfcd.TransformedDistribution(
            distribution=studentT,
            bijector=tfcd.bijectors.Affine(scale_tril=tril),
            name="MultiStudentT")
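# A short usage sketch for the graph built above (it relies on the assumed
# `multi_studentT` completion): draw correlated samples in a TF 1.x session.
# The Affine forward transform applies `tril` to each [1, 2]-shaped draw from
# the vectorized t-distribution.
with g.as_default():
    samples = multi_studentT.sample(1000)

with tf.Session(graph=g) as sess:
    drawn = sess.run(samples)
    print(drawn.shape)  # (1000, 1, 2)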
def __init__(self):
    # Student's t distribution whose CDF defines this bijector's transform.
    self.studentT_dist = tfcd.StudentT(df=4, loc=0., scale=1.)
    super(StudentTCDF, self).__init__(
        forward_min_event_ndims=0,
        validate_args=False,
        name="StudentTCDF")
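    # A hypothetical companion method, not in the original snippet: following
    # the tf.contrib Bijector convention, the forward transform would map
    # inputs through the Student's t CDF (values in (0, 1)), which
    # tfcd.StudentT exposes as `cdf`.
    def _forward(self, x):
        return self.studentT_dist.cdf(x)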