Example #1
 def _get_latents(self, inputs, scope_name='model/enc_cla', reuse=False):
     """Encode `inputs` into latent representations with the encoder MLP."""
     with tf.variable_scope(scope_name, reuse=reuse):
         mlp = MLP(name='inputs_to_latents',
                   shapes=[self.xdim] + self.hidden_layer_specs['enc'] +
                   [self.zdim],
                   activ=ACTIV)
         return mlp.forward(inputs)
Example #2
 def _get_class_logits(self, scope_name='model/preds'):
     """Compute class-prediction logits for the input batch self.X."""
     with tf.variable_scope(scope_name):
         mlp = MLP(name='data_to_class_preds',
                   shapes=[self.xdim] +
                   self.hidden_layer_specs['layer_sizes'] + [self.ydim],
                   activ=self.hidden_layer_specs['activ'])
         logits = mlp.forward(self.X)
         return logits
Example #3
 def _get_recon_inputs(self, latents, scope_name='model/enc_cla'):
     """Decode reconstructed inputs by concatenating self.Z with self.A.

     Note: the `latents` argument is unused; the decoder reads self.Z
     directly, so the MLP input width is zdim + 1.
     """
     with tf.variable_scope(scope_name):
         mlp = MLP(name='latents_to_reconstructed_inputs',
                   shapes=[self.zdim + 1] + self.hidden_layer_specs['rec'] + [self.xdim],
                   activ=ACTIV)
         Z_and_A = tf.concat([self.Z, self.A], axis=1)
         final_reps = mlp.forward(Z_and_A)
         return final_reps
Example #4
 def _get_sensitive_logits(self,
                           inputs,
                           scope_name='model/aud',
                           reuse=False):
     """Compute sensitive-attribute logits from `inputs` (expected width zdim + ydim)."""
     with tf.variable_scope(scope_name, reuse=reuse):
         mlp = MLP(name='latents_to_sensitive_logits',
                   shapes=[self.zdim + 1 * self.ydim] +
                   self.hidden_layer_specs['aud'] + [self.adim],
                   activ=ACTIV)
         return mlp.forward(inputs)
Example #5
 def _get_idks(self, scope_name='model/preds'):
     """Compute per-class idk scores from self.X and the class predictions.

     self.Y_hat is passed through tf.stop_gradient so that training the
     idk head does not backpropagate into the classifier.
     """
     with tf.variable_scope(scope_name):
         mlp = MLP(name='data_to_idks',
                   shapes=[self.xdim + self.ydim] +
                   self.hidden_layer_specs['layer_sizes'] + [self.ydim],
                   activ=self.hidden_layer_specs['activ'])
         Y_hat_no_grad = tf.stop_gradient(self.Y_hat)
         self.idk_logits = mlp.forward(
             tf.concat([self.X, Y_hat_no_grad], axis=1))
         idks = tf.sigmoid(self.idk_logits)
         return idks
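
All five examples rely on the same MLP(name=..., shapes=..., activ=...) helper and its forward method, plus a module-level ACTIV activation, none of which is shown above. As a rough, hypothetical sketch only (the project's actual class may differ), such a helper could be a plain fully connected stack whose layer widths follow shapes and whose hidden layers use activ:

# Hypothetical sketch of the MLP helper assumed by the examples above;
# not the original implementation.
import tensorflow as tf

ACTIV = tf.nn.relu  # assumed hidden-layer activation

class MLP(object):
    def __init__(self, name, shapes, activ):
        self.name = name      # variable-scope name for this network
        self.shapes = shapes  # layer widths, e.g. [xdim, h1, h2, zdim]
        self.activ = activ    # activation applied to the hidden layers

    def forward(self, x):
        out = x
        with tf.variable_scope(self.name):
            for i, (n_in, n_out) in enumerate(zip(self.shapes[:-1], self.shapes[1:])):
                W = tf.get_variable('W{}'.format(i), shape=[n_in, n_out])
                b = tf.get_variable('b{}'.format(i), shape=[n_out],
                                    initializer=tf.zeros_initializer())
                out = tf.matmul(out, W) + b
                if i < len(self.shapes) - 2:  # keep the output layer linear
                    out = self.activ(out)
        return out

With a helper along these lines, _get_latents above amounts to stacking [self.xdim] + self.hidden_layer_specs['enc'] + [self.zdim] and pushing the inputs through it once inside the encoder's variable scope.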