示例#1
0
    def _create_parameters(self):
        """
        Instantiate the parameters of the network.

        Builds the input layer, the remaining hidden layers and a linear
        output layer, optionally adds the regression noise hyper-parameters,
        and finally creates shallow copies of every layer for inference
        with N replaced by N_valid.
        :return:
        """
        self.extra, self.extra_inf = OrderedDict(), OrderedDict()

        # keyword arguments shared by every layer constructor call
        shared_kw = dict(priors=self.priors, N=self.N, type_init=self.type_init,
                         n_inducing=self.n_inducing, noise_lvl=self.ind_noise_lvl)

        # input layer (+1 on the input dimension for the bias column)
        layers = [self.layer(self.dimx + 1, self.dimh[0], 'in',
                             nonlin=self.nonlinearity, **shared_kw)]
        # remaining hidden layers
        self.hidden_params = []
        for idx, width in enumerate(self.dimh[1:]):
            layers.append(self.layer(self.dimh[idx] + 1, width, 'h' + str(idx + 1),
                                     nonlin=self.nonlinearity, **shared_kw))
        # linear output layer
        layers.append(self.layer(self.dimh[-1] + 1, self.dimy, 'out',
                                 nonlin='linear', **shared_kw))

        if self.task_type == 'regression':
            # Log-domain Gamma hyper-parameters of the output noise; one set
            # for training and one for inference.
            # NOTE(review): a0/b0 look like module-level constants — confirm.
            self.extra['a1'] = nnu.multvector(self.dimy, np.log(a0), 'out_a1')
            self.extra['b1'] = nnu.multvector(self.dimy, np.log(b0), 'out_b1')
            self.extra_inf['a1'] = nnu.multvector(self.dimy, np.log(a0), 'out_a1_inf')
            self.extra_inf['b1'] = nnu.multvector(self.dimy, np.log(b0), 'out_b1_inf')

        self.layers = layers
        # Shallow copies for validation/inference: they share parameters with
        # the training layers but use the validation-set size N_valid.
        self.layers_inf = [copy(layer) for layer in layers]
        for inf_layer in self.layers_inf:
            inf_layer.N = self.N_valid
示例#2
0
    def _create_parameters(self):
        """
        Instantiate the parameters of the network.

        Creates input, hidden and output layers, optionally the regression
        noise hyper-parameters, and per-layer shallow copies used for
        inference with N set to N_valid.
        :return:
        """
        self.extra = OrderedDict()
        self.extra_inf = OrderedDict()

        def build(d_in, d_out, tag, nonlin):
            # Every layer shares the same priors / init / inducing settings;
            # only dimensions, name tag and nonlinearity differ.
            return self.layer(d_in, d_out, tag,
                              priors=self.priors,
                              N=self.N,
                              nonlin=nonlin,
                              type_init=self.type_init,
                              n_inducing=self.n_inducing,
                              noise_lvl=self.ind_noise_lvl)

        # input layer (+1 on the input dimension for the bias column)
        layers = [build(self.dimx + 1, self.dimh[0], 'in', self.nonlinearity)]
        # remaining hidden layers
        self.hidden_params = []
        for i, h in enumerate(self.dimh[1:]):
            layers.append(build(self.dimh[i] + 1, h, 'h' + str(i + 1),
                                self.nonlinearity))
        # linear output layer
        layers.append(build(self.dimh[-1] + 1, self.dimy, 'out', 'linear'))

        if self.task_type == 'regression':
            # Log-domain Gamma hyper-parameters of the output noise; a
            # training copy and an inference copy of each.
            # NOTE(review): a0/b0 look like module-level constants — confirm.
            a1 = nnu.multvector(self.dimy, np.log(a0), 'out_a1')
            b1 = nnu.multvector(self.dimy, np.log(b0), 'out_b1')
            a1inf = nnu.multvector(self.dimy, np.log(a0), 'out_a1_inf')
            b1inf = nnu.multvector(self.dimy, np.log(b0), 'out_b1_inf')
            self.extra['a1'] = a1
            self.extra['b1'] = b1
            self.extra_inf['a1'] = a1inf
            self.extra_inf['b1'] = b1inf

        self.layers = layers
        # Shallow copies for inference: parameters are shared with the
        # training layers, but the effective dataset size becomes N_valid.
        self.layers_inf = [copy(layer) for layer in layers]
        for layer_copy in self.layers_inf:
            layer_copy.N = self.N_valid
示例#3
0
    def __init__(self, dim_in, dim_out, name, priors=(0., 0., 0.), N=1, nonlin='relu', type_init='he2', n_inducing=50,
                 noise_lvl=0.01):
        """
        Matrix-Gaussian diagonal layer with free-form dropout parameters.

        :param dim_in: input dimensionality of the layer
        :param dim_out: output dimensionality of the layer
        :param name: suffix used to tag every parameter of this layer
        :param priors: prior hyper-parameters forwarded to the base class
        :param N: dataset size forwarded to the base class
        :param nonlin: name of the nonlinearity
        :param type_init: weight-initialization scheme for the mean matrix
        :param n_inducing: number of inducing points
        :param noise_lvl: initial dropout noise variance (stored as log-std)
        """
        # record layer configuration before delegating to the base class
        self.dim_in = dim_in
        self.dim_out = dim_out
        self.name = name
        self.type_init = type_init
        self.n_inducing = n_inducing
        self.noise_lvl = noise_lvl

        init_sigma = 0.01
        log_alpha = np.log(np.sqrt(noise_lvl))  # dropout log-std initial value
        params = [
            # mean matrix and diagonal row/column scales of the posterior
            nnu.randmat(dim_in, dim_out, 'mu_' + name, type_init=type_init, type_dist='normal'),
            nnu.randvector(dim_in, 'sigma_row_mgauss_' + name, sigma=init_sigma),
            nnu.randvector(dim_out, 'sigma_col_mgauss_' + name, sigma=init_sigma),
            # inducing inputs/outputs
            nnu.randmat2(n_inducing, dim_in, 'inducing_x_' + name, sigma=init_sigma, type_dist='uniform'),
            nnu.randmat2(n_inducing, dim_out, 'inducing_y_' + name, sigma=init_sigma, type_dist='uniform'),
            # per-unit dropout noise parameters (inputs, outputs, inducing)
            nnu.multvector(dim_in, log_alpha, name='dropout_alpha_ffdrop_x_' + name),
            nnu.multvector(dim_out, log_alpha, name='dropout_alpha_ffdrop_y_' + name),
            nnu.multvector(n_inducing, log_alpha, name='dropout_alpha_ffdrop_pd_' + name),
        ]

        super(MatrixGaussDiagLayerFF, self).__init__(params, N=N, nonlin=nonlin, priors=priors)
示例#4
0
    def __init__(self, dim_in, dim_out, name, priors=(0., 1., 1.), N=1, nonlin='relu', type_init='he2', n_inducing=50,
                 noise_lvl=0.01):
        """
        Matrix-Gaussian diagonal layer with learnable prior hyper-parameters.

        :param dim_in: input dimensionality of the layer
        :param dim_out: output dimensionality of the layer
        :param name: suffix used to tag every parameter of this layer
        :param priors: prior hyper-parameters forwarded to the base class
        :param N: dataset size forwarded to the base class
        :param nonlin: name of the nonlinearity
        :param type_init: weight-initialization scheme for the mean matrix
        :param n_inducing: number of inducing points
        :param noise_lvl: initial dropout noise variance (stored as log-std)
        """
        init_sigma = 0.01
        # initial Gamma hyper-parameters for the learnable row/column priors
        self.a0, self.b0 = 1., .5
        log_alpha = np.log(np.sqrt(noise_lvl))  # dropout log-std initial value

        params = [
            # mean matrix and diagonal row/column scales of the posterior
            nnu.randmat(dim_in, dim_out, 'mu_' + name, type_init=type_init),
            nnu.randvector(dim_in, 'sigma_row_mgauss_' + name, sigma=init_sigma),
            nnu.randvector(dim_out, 'sigma_col_mgauss_' + name, sigma=init_sigma),
            # inducing inputs/outputs
            nnu.randmat2(n_inducing, dim_in, 'inducing_x_' + name, sigma=init_sigma, type_dist='uniform'),
            nnu.randmat2(n_inducing, dim_out, 'inducing_y_' + name, sigma=init_sigma, type_dist='uniform'),
            # learnable log-Gamma hyper-parameters of the row/column priors
            nnu.multvector(1, np.log(self.a0), 'row_a_' + name),
            nnu.multvector(1, np.log(self.b0), 'row_b_' + name),
            nnu.multvector(1, np.log(self.a0), 'col_a_' + name),
            nnu.multvector(1, np.log(self.b0), 'col_b_' + name),
            # scalar dropout noise parameters for inputs and outputs
            nnu.tscalar(log_alpha, 'scalar_dropout_alpha_x_' + name),
            nnu.tscalar(log_alpha, 'scalar_dropout_alpha_y_' + name),
        ]

        # record layer configuration before delegating to the base class
        self.dim_in = dim_in
        self.dim_out = dim_out
        self.name = name
        self.type_init = type_init
        self.n_inducing = n_inducing
        self.noise_lvl = noise_lvl
        super(MatrixGaussDiagLayerLearnP, self).__init__(params, N=N, nonlin=nonlin, priors=priors)