Example #1
    def k_diag(self, input_x, return_full=True):
        """Iteratively building the diagonal part (variance) of the NNGP kernel.

    Args:
      input_x: tensor of input of size [num_data, input_dim].
      return_full: boolean for output to be [num_data] sized or a scalar value
        for normalized inputs

    Sets self.layer_qaa_dict of {layer #: qaa at the layer}

    Returns:
      qaa: variance at the output.
    """
        with tf.name_scope("Kdiag"):
            # If inputs are normalized, the input length (variance) starts at 1.
            if self.use_fixed_point_norm:
                current_qaa = self.var_fixed_point
            else:
                current_qaa = self.weight_var * tf.convert_to_tensor(
                    [1.], dtype=tf.float64) + self.bias_var
            self.layer_qaa_dict = {0: current_qaa}
            for l in range(self.depth):
                with tf.name_scope("layer_%d" % l):
                    samp_qaa = interp.interp_lin(self.var_aa_grid,
                                                 self.qaa_grid, current_qaa)
                    samp_qaa = self.weight_var * samp_qaa + self.bias_var
                    self.layer_qaa_dict[l + 1] = samp_qaa
                    current_qaa = samp_qaa

            if return_full:
                qaa = tf.tile(current_qaa[:1], [input_x.shape[0].value])
            else:
                qaa = current_qaa[0]
            return qaa
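
For intuition, here is a minimal pure-NumPy sketch of the same variance recursion. It assumes interp.interp_lin is plain linear interpolation over a precomputed (var_aa_grid, qaa_grid) lookup table; the grid below uses the ReLU relation E[phi(z)^2] = q / 2 purely for illustration, and all constants are made up.

import numpy as np

# Hypothetical lookup table mapping pre-activation variance q to E[phi(z)^2].
# In the real kernel these grids come from numerical integration.
var_aa_grid = np.linspace(0.0, 10.0, 100)
qaa_grid = 0.5 * var_aa_grid          # ReLU: E[phi(z)^2] = q / 2

weight_var, bias_var, depth = 1.5, 0.1, 5

# q^0 for unit-norm inputs, then q^{l+1} = weight_var * E[phi(z)^2]_{q^l} + bias_var.
current_qaa = weight_var * np.array([1.0]) + bias_var
layer_qaa = {0: current_qaa}
for l in range(depth):
    samp_qaa = np.interp(current_qaa, var_aa_grid, qaa_grid)  # table lookup
    current_qaa = weight_var * samp_qaa + bias_var
    layer_qaa[l + 1] = current_qaa

print(layer_qaa[depth])  # output-layer variance for a normalized input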
Example #2
    def k_diag(self, input_x, return_full=True):
        """Iteratively building the diagonal part (variance) of the NNGP kernel.

        Args:
            input_x: tensor of input of size [num_data, input_dim].
            return_full: boolean for output to be [num_data] sized or a scalar value
                for normalized inputs

        Sets self.layer_qaa_dict of {layer #: qaa at the layer}

        Returns:
        qaa: variance at the output.
        """
        current_qaa = self.weight_var * np.array([1.]) + self.bias_var
        self.layer_qaa_dict = {0: current_qaa}
        for l in range(self.depth):
            samp_qaa = interp.interp_lin(self.var_aa_grid, self.qaa_grid,
                                         current_qaa)
            samp_qaa = self.weight_var * samp_qaa + self.bias_var
            self.layer_qaa_dict[l + 1] = samp_qaa
            current_qaa = samp_qaa

        if return_full:
            qaa = np.repeat(current_qaa[:1], [input_x.shape[0]])
        else:
            qaa = current_qaa[0]
        return qaa
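
A quick check of the return_full branch, with a made-up variance value and data count; since normalized inputs all share the same diagonal entry, the full output is just the scalar repeated once per datapoint.

import numpy as np

current_qaa = np.array([2.37])   # hypothetical output variance for normalized inputs
num_data = 4                     # stand-in for input_x.shape[0]

full = np.repeat(current_qaa[:1], [num_data])   # return_full=True: one entry per datapoint
scalar = current_qaa[0]                         # return_full=False: a single scalar

print(full)    # [2.37 2.37 2.37 2.37]
print(scalar)  # 2.37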
Example #3
    def get_var_fixed_point(self):
        """Iterates the variance map to its fixed point (for normalized inputs)."""
        with tf.name_scope("get_var_fixed_point"):
            # If inputs are normalized, the input length (variance) starts at 1.
            current_qaa = self.weight_var * tf.constant(
                [1.], dtype=tf.float64) + self.bias_var

            diff = 1.
            prev_qaa_np = 1.
            it = 0
            while diff > 1e-6 and it < 300:
                samp_qaa = interp.interp_lin(self.var_aa_grid, self.qaa_grid,
                                             current_qaa)
                samp_qaa = self.weight_var * samp_qaa + self.bias_var
                current_qaa = samp_qaa

                # Evaluate the updated variance in NumPy to check convergence.
                with tf.Session() as sess:
                    current_qaa_np = sess.run(current_qaa)
                diff = np.abs(current_qaa_np - prev_qaa_np)
                it += 1
                prev_qaa_np = current_qaa_np
            return current_qaa_np, current_qaa
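
The TF1 version above rebuilds graph ops and opens a new Session on every iteration to read the value back. For intuition only, here is a plain-NumPy sketch of the same fixed-point search, under the same assumed linear-interpolation lookup and made-up constants as in the earlier sketch.

import numpy as np

var_aa_grid = np.linspace(0.0, 10.0, 100)   # hypothetical grids, as before
qaa_grid = 0.5 * var_aa_grid                # ReLU: E[phi(z)^2] = q / 2
weight_var, bias_var = 1.5, 0.1

# Iterate q <- weight_var * E[phi(z)^2]_q + bias_var until the variance stops changing.
current_qaa = weight_var * np.array([1.0]) + bias_var
prev_qaa = 1.0
for it in range(300):
    samp_qaa = np.interp(current_qaa, var_aa_grid, qaa_grid)
    current_qaa = weight_var * samp_qaa + bias_var
    if np.abs(current_qaa[0] - prev_qaa) < 1e-6:
        break
    prev_qaa = current_qaa[0]

print(current_qaa[0])  # approximately 0.4, the fixed-point variance for these constants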