def _random_sample(self, log_alpha, u, layer):
  """Returns sampled random variables parameterized by log_alpha."""
  # Generate tied randomness for later
  if layer not in self.uniform_samples_v:
    self.uniform_samples_v[layer] = self._u_to_v(log_alpha, u)

  # Sample random variable underlying softmax/argmax
  x = log_alpha + U.safe_log_prob(u) - U.safe_log_prob(1 - u)
  samples = tf.stop_gradient(tf.to_float(x > 0))

  return {
      'preactivation': x,
      'activation': samples,
      'log_param': log_alpha,
  }
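# Illustrative sketch (not part of the class): why thresholding the logistic
# preactivation at zero yields Bernoulli(sigmoid(log_alpha)) samples. If
# u ~ Uniform(0, 1), then log(u) - log(1 - u) is standard Logistic noise, so
# P(log_alpha + noise > 0) = sigmoid(log_alpha). NumPy stand-in; the function
# name and defaults below are ours, for illustration only.
def _hard_sample_check(log_alpha=0.5, n=100000, seed=0):
  import numpy as np
  u = np.random.RandomState(seed).uniform(size=n)
  x = log_alpha + np.log(u) - np.log(1.0 - u)
  empirical = (x > 0).mean()                    # fraction of hard ones
  expected = 1.0 / (1.0 + np.exp(-log_alpha))   # sigmoid(log_alpha)
  return empirical, expected                    # should be ~equal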
def _random_sample_soft(self, log_alpha, u, layer, temperature=None):
  """Returns sampled random variables parameterized by log_alpha."""
  if temperature is None:
    temperature = self.hparams.temperature

  # Sample random variable underlying softmax/argmax
  x = log_alpha + U.safe_log_prob(u) - U.safe_log_prob(1 - u)
  x /= temperature
  if self.hparams.muprop_relaxation:
    x += temperature / (temperature + 1) * log_alpha
  y = tf.nn.sigmoid(x)

  return {'preactivation': x, 'activation': y, 'log_param': log_alpha}
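# Illustrative sketch (not part of the class): dividing the same logistic
# preactivation by a temperature and applying a sigmoid is the binary
# Concrete relaxation; as the temperature shrinks, the soft sample approaches
# the hard threshold used by _random_sample. NumPy stand-in, names are ours.
def _soft_sample_demo(log_alpha=0.5, u=0.3):
  import numpy as np
  x = log_alpha + np.log(u) - np.log(1.0 - u)  # logistic preactivation
  for t in (1.0, 0.5, 0.1, 0.01):
    y = 1.0 / (1.0 + np.exp(-x / t))           # sigmoid(x / t)
    print('t=%.2f  soft=%.4f  hard=%d' % (t, y, x > 0))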
def _random_sample_soft_v2(self, log_alpha, u, layer, temperature=None):
  """Returns sampled random variables parameterized by log_alpha.

  Variant of _random_sample_soft that broadcasts a temperature tensor
  across the last axis of the logits (one temperature per row of
  log_alpha rather than a single scalar).
  """
  if temperature is None:
    temperature = self.hparams.temperature

  # Sample random variable underlying softmax/argmax
  x = log_alpha + U.safe_log_prob(u) - U.safe_log_prob(1 - u)
  x /= tf.expand_dims(temperature, -1)
  if self.hparams.muprop_relaxation:
    y = tf.nn.sigmoid(x + log_alpha * tf.expand_dims(
        temperature / (temperature + 1), -1))
  else:
    y = tf.nn.sigmoid(x)

  return {'preactivation': x, 'activation': y, 'log_param': log_alpha}
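# Illustrative sketch (not part of the class): how the expand_dims broadcast
# in _random_sample_soft_v2 applies one temperature per row of log_alpha.
# NumPy stand-in; the shapes below are assumptions chosen for illustration.
def _per_row_temperature_demo():
  import numpy as np
  log_alpha = np.zeros((2, 3))           # [batch, units]
  temperature = np.array([1.0, 0.1])     # one temperature per batch row
  x = log_alpha + 0.7                    # stand-in logistic preactivation
  x = x / temperature[:, None]           # == tf.expand_dims(temperature, -1)
  y = 1.0 / (1.0 + np.exp(-x))
  return y  # row 0 stays soft; row 1 is pushed near the hard sample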