Example #1
    def __init__(self, input, n_in):

        # weight vector and scalar bias of a single linear output unit
        self.W = gen_param(name='W', shape=(n_in, ))
        self.b = gen_param(name='b', shape=(1, ))

        # affine output: input . W + b
        self.output = T.dot(input, self.W) + self.b

        self.params = [self.W, self.b]
        self.input = input
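The examples on this page all call a gen_param helper that is not shown here. A minimal sketch of what such a helper might look like, assuming it wraps a theano.shared variable, initialises to zeros by default (bias vectors), and draws small uniform values when an rng is supplied; the init scheme is an assumption, not the repo's actual implementation:

import numpy as np
import theano

def gen_param(name, shape, rng=None):
    # Hypothetical stand-in for the gen_param helper used above.
    # Zeros by default (biases); small uniform values when an rng
    # is supplied (weight matrices). The exact scheme is assumed.
    if rng is None:
        value = np.zeros(shape, dtype=theano.config.floatX)
    else:
        bound = np.sqrt(6. / np.sum(shape))
        value = np.asarray(
            rng.uniform(low=-bound, high=bound, size=shape),
            dtype=theano.config.floatX)
    return theano.shared(value=value, name=name, borrow=True)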
Example #2
    def __init__(self,
                 input,
                 rng=None,
                 theano_rng=None,
                 n_visible=None,
                 n_hidden=None,
                 W=None,
                 hbias=None,
                 vbias=None,
                 dropout=None,
                 v_activation=T.nnet.sigmoid,
                 h_activation=T.nnet.sigmoid,
                 optimizer=SGD,
                 is_train=0):

        # default NumPy RNG, and a Theano random stream seeded from it
        if rng is None:
            rng = np.random.RandomState(888)

        if theano_rng is None:
            theano_rng = RandomStreams(rng.randint(2**30))

        # create any parameters not passed in: the visible-to-hidden
        # weight matrix plus the hidden and visible bias vectors
        if W is None:
            W = gen_param(name='W', shape=(n_visible, n_hidden), rng=rng)

        if hbias is None:
            hbias = gen_param(name='hbias', shape=(n_hidden, ))

        if vbias is None:
            vbias = gen_param(name='vbias', shape=(n_visible, ))

        self.W = W
        self.hbias = hbias
        self.vbias = vbias

        self.dropout = dropout
        self.is_train = is_train

        self.input = input
        self.theano_rng = theano_rng
        self.n_visible = n_visible
        self.n_hidden = n_hidden

        self.params = [self.W, self.hbias, self.vbias]

        # optimizer instance that will produce the parameter updates
        self.opt = optimizer(self.params)

        self.v_activation = v_activation
        self.h_activation = h_activation
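A usage sketch for the constructor above, assuming the surrounding class is an RBM named RBM and that gen_param and SGD are importable from the same module; the class name and layer sizes are assumptions:

import numpy as np
import theano.tensor as T

x = T.matrix('x')  # minibatch of visible vectors
rbm = RBM(input=x,
          rng=np.random.RandomState(888),
          n_visible=784,  # e.g. flattened 28x28 images (assumed)
          n_hidden=500,
          dropout=0.5,
          is_train=1)
# rbm.params now holds [W, hbias, vbias]; rbm.opt will generate updates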
Example #3
    def __init__(self, input, n_in, n_out, W=None, b=None):

        # create W and b unless shared parameters were passed in
        if W is None:
            W = gen_param(name='W', shape=(n_in, n_out))

        if b is None:
            b = gen_param(name='b', shape=(n_out, ))

        self.W = W
        self.b = b

        # class-membership probabilities: softmax over the affine scores
        self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)
        # predicted class: index of the largest probability
        self.y_pred = T.argmax(self.p_y_given_x, axis=1)

        self.params = [self.W, self.b]
        self.input = input
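Example #3 is a softmax (multinomial logistic regression) output layer. It is typically trained with a negative log-likelihood cost; a minimal sketch, assuming the surrounding class is named LogisticRegression and y holds integer class labels (both assumptions):

import theano.tensor as T

x = T.matrix('x')
y = T.ivector('y')  # integer class labels

clf = LogisticRegression(input=x, n_in=784, n_out=10)

# mean negative log-probability of the correct class over the minibatch
nll = -T.mean(T.log(clf.p_y_given_x)[T.arange(y.shape[0]), y])

# fraction of misclassified examples
errors = T.mean(T.neq(clf.y_pred, y))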
Example #4
    def __init__(self, n_out, rng):
        # gamma (scale) initialised to ones: a uniform draw over the
        # degenerate interval [1, 1] keeps the rng-based init pattern
        self._gamma = theano.shared(
            value=np.asarray(
                rng.uniform(
                    low=1.,
                    high=1.,
                    size=(n_out,)
                ),
                dtype=theano.config.floatX
            ),
            name='gamma',
            borrow=True
        )
        # beta (shift) created via the shared gen_param helper
        self._beta = gen_param(name='beta', shape=(n_out,))

        self.params = [self._gamma, self._beta]
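Example #4 only creates the scale (gamma) and shift (beta) parameters of a batch-normalisation layer; the commented-out code in Example #5 calls a bn.batch_norm(...) method that is not shown. A minimal sketch of what such a method might compute, assuming normalisation over the minibatch axis; the body is an assumption, not the repo's implementation:

import theano.tensor as T

def batch_norm(self, x, eps=1e-5):
    # normalise each feature over the minibatch (axis 0),
    # then rescale by gamma and shift by beta
    mean = T.mean(x, axis=0)
    var = T.var(x, axis=0)
    x_hat = (x - mean) / T.sqrt(var + eps)
    return self._gamma * x_hat + self._beta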
Example #5
    def __init__(self, input, rng, theano_rng, n_in, n_out, W=None, b=None,
                 activation=None, dropout=None, dropconnect=None,
                 is_train=0):

        self.params = []
        if W is None:
            W = gen_param(name='W', shape=(n_in, n_out), rng=rng)

        if b is None:
            b = gen_param(name='b', shape=(n_out,))

        self.W = W
        self.b = b

        if (dropconnect is None) or (dropconnect == 0.):
            # no DropConnect: plain affine transform plus activation
            lin_output = T.dot(input, self.W) + self.b

            # bn = BN(n_out, rng)
            # self.params.extend(bn.params)
            # output = activation(bn.batch_norm(lin_output))

            output = activation(lin_output)

            self.consider_constant = None

        else:
            # DropConnect: at train time, zero individual weights with
            # probability `dropconnect` via a sampled binary mask; at
            # test time, use the expected weights (1 - dropconnect) * W
            output = theano.ifelse.ifelse(
                condition=T.eq(is_train, 1),
                then_branch=activation(T.dot(input, T.switch(
                    theano_rng.binomial(
                        size=(n_in, n_out),
                        p=(1.-dropconnect),
                        dtype=theano.config.floatX
                    ),
                    self.W, 0.
                )) + self.b),
                else_branch=activation(
                    T.dot(input, (1.-dropconnect)*self.W) + self.b)
                # else_branch=activation(
                #   T.mean(normal_sample, axis=0) + self.b)
            )
            self.consider_constant = None

        if (dropout is not None) and (dropout > 0.):
            # dropout: at train time, zero whole units with probability
            # `dropout`; at test time, scale by the keep probability
            # (standard rather than inverted dropout)
            output = theano.ifelse.ifelse(
                condition=T.eq(is_train, 1),
                then_branch=T.switch(
                    theano_rng.binomial(
                        size=(n_out,),
                        p=(1.-dropout),
                        dtype=theano.config.floatX
                    ),
                    output, 0.
                ),
                else_branch=output*(1.-dropout)
            )

        self.output = output

        self.params.extend([self.W, self.b])
        self.input = input
        self.n_in = n_in
        self.n_out = n_out
        self.is_train = is_train
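A usage sketch for the hidden layer in Example #5, assuming the surrounding class is named HiddenLayer (an assumption) and showing how the symbolic is_train flag switches between the train-time and test-time graphs:

import numpy as np
import theano
import theano.ifelse  # the constructor calls theano.ifelse.ifelse
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

x = T.matrix('x')
is_train = T.iscalar('is_train')  # 1 at train time, 0 at test time

rng = np.random.RandomState(888)
layer = HiddenLayer(input=x, rng=rng,
                    theano_rng=RandomStreams(rng.randint(2**30)),
                    n_in=784, n_out=500,  # sizes assumed
                    activation=T.tanh,
                    dropout=0.5, is_train=is_train)

f = theano.function([x, is_train], layer.output)
batch = np.random.rand(32, 784).astype(theano.config.floatX)
train_out = f(batch, 1)  # stochastic dropout mask applied
test_out = f(batch, 0)   # deterministic, scaled by keep probability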