Example #1
    def __init__(self,
                 num_hidden,
                 num_input,
                 num_batch=1,
                 w_init=XavierNormal(),
                 b_init=ZeroInit(),
                 has_bias=True,
                 s_init=Uniform(),
                 train_mask=None,
                 **kwargs):
        super(LinearReadout, self).__init__(size=num_hidden, **kwargs)

        # parameters
        self.w_init = w_init
        self.b_init = b_init
        self.s_init = s_init
        self.num_input = num_input
        self.has_bias = has_bias

        # weights
        self.w = math.TrainVar(w_init((num_input, self.num)))
        if has_bias:
            self.b = math.TrainVar(b_init((self.num,)))

        if train_mask is not None:
            assert train_mask.shape == self.w.shape, 'train_mask must match the weight shape'
            self.train_mask = train_mask

        # variables
        self.s = math.Variable(self.s_init((num_batch, self.num)))
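
A standalone sketch of the pattern used above: an initializer object is called with a shape to produce the array, and TrainVar marks the result as trainable. The module names below (bp.init, brainpy.math) are an assumption based on the later examples; the LinearReadout class itself is not reproduced.

    import brainpy as bp
    import brainpy.math as bm

    num_input, num_hidden = 5, 10
    w_init = bp.init.XavierNormal()                   # initializers are callables taking a shape
    w = bm.TrainVar(w_init((num_input, num_hidden)))  # trainable (num_input, num_hidden) weights
    b = bm.TrainVar(bm.zeros(num_hidden))             # trainable bias, as when has_bias=True
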
Example #2
    def __init__(self,
                 num_hidden,
                 num_input,
                 num_batch,
                 wx=Orthogonal(),
                 wh=Orthogonal(),
                 b=ZeroInit(),
                 h=ZeroInit(),
                 **kwargs):
        super(GRU, self).__init__(num_hidden, num_input, **kwargs)

        self.has_bias = True

        # variables
        self.h = bm.Variable(self.get_param(h, (num_batch, self.num_hidden)))

        # weights
        wxs = self.get_param(wx, (num_input * 3, num_hidden))
        self.w_iz = bm.TrainVar(wxs[:num_input])                 # update-gate input weights
        self.w_ir = bm.TrainVar(wxs[num_input:num_input * 2])    # reset-gate input weights
        self.w_ia = bm.TrainVar(wxs[num_input * 2:])             # candidate input weights
        whs = self.get_param(wh, (num_hidden * 3, num_hidden))
        self.w_hz = bm.TrainVar(whs[:num_hidden])                # update-gate recurrent weights
        self.w_hr = bm.TrainVar(whs[num_hidden:num_hidden * 2])  # reset-gate recurrent weights
        self.w_ha = bm.TrainVar(whs[num_hidden * 2:])            # candidate recurrent weights
        bs = self.get_param(b, (num_hidden * 3,))
        self.bz = bm.TrainVar(bs[:num_hidden])                   # update-gate bias
        self.br = bm.TrainVar(bs[num_hidden:num_hidden * 2])     # reset-gate bias
        self.ba = bm.TrainVar(bs[num_hidden * 2:])               # candidate bias
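
Note the fused-weight trick here: each set of three gate matrices is drawn from a single allocation and then sliced, so one initializer call covers all three gates. A hedged standalone sketch of just the slicing step (bm.random.randn stands in for the initializer):

    import brainpy.math as bm

    num_input, num_hidden = 4, 8
    wxs = bm.random.randn(num_input * 3, num_hidden)  # one fused (3 * num_input, num_hidden) array
    w_iz = bm.TrainVar(wxs[:num_input])               # update-gate slice
    w_ir = bm.TrainVar(wxs[num_input:num_input * 2])  # reset-gate slice
    w_ia = bm.TrainVar(wxs[num_input * 2:])           # candidate slice
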
Example #3
import numpy as onp
import jax.numpy as jnp
import brainpy.math as bm

def get_param(param, size):
    if param is None:
        return None
    if callable(param):
        return bm.TrainVar(param(size))
    if isinstance(param, onp.ndarray):
        assert param.shape == size, f'Expected shape {size}, got {param.shape}'
        return bm.TrainVar(bm.asarray(param))
    if isinstance(param, (bm.JaxArray, jnp.ndarray)):
        return bm.TrainVar(param)
    raise ValueError(f'Cannot create a parameter from type {type(param)}.')
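
Assuming the helper above is in scope, each accepted form behaves as follows (a usage sketch, not part of the original example):

    assert get_param(None, (3, 4)) is None               # absent parameter
    v1 = get_param(lambda size: bm.zeros(size), (3, 4))  # callable initializer -> TrainVar
    v2 = get_param(onp.ones((3, 4)), (3, 4))             # concrete numpy array -> TrainVar
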
Example #4
    def __init__(self,
                 num_input,
                 num_hidden,
                 num_output,
                 num_batch,
                 dt=None,
                 e_ratio=0.8,
                 sigma_rec=0.,
                 seed=None,
                 w_ir=bp.init.KaimingUniform(scale=1.),
                 w_rr=bp.init.KaimingUniform(scale=1.),
                 w_ro=bp.init.KaimingUniform(scale=1.)):
        super(RNN, self).__init__()

        # parameters
        self.tau = 100
        self.num_batch = num_batch
        self.num_input = num_input
        self.num_hidden = num_hidden
        self.num_output = num_output
        self.e_size = int(num_hidden * e_ratio)
        self.i_size = num_hidden - self.e_size
        if dt is None:
            self.alpha = 1
        else:
            self.alpha = dt / self.tau
        self.sigma_rec = (2 * self.alpha)**0.5 * sigma_rec  # Recurrent noise
        self.rng = bm.random.RandomState(seed=seed)

        # hidden mask
        mask = np.tile([1] * self.e_size + [-1] * self.i_size, (num_hidden, 1))
        np.fill_diagonal(mask, 0)
        self.mask = bm.asarray(mask, dtype=bm.float_)

        # input weight
        self.w_ir = self.get_param(w_ir, (num_input, num_hidden))

        # recurrent weight
        bound = 1 / num_hidden**0.5
        self.w_rr = self.get_param(w_rr, (num_hidden, num_hidden))
        # scale excitatory weights down so total excitation balances inhibition
        self.w_rr[:, :self.e_size] /= (self.e_size / self.i_size)
        self.b_rr = bm.TrainVar(self.rng.uniform(-bound, bound, num_hidden))

        # readout weight
        bound = 1 / self.e_size**0.5
        self.w_ro = self.get_param(w_ro, (self.e_size, num_output))
        self.b_ro = bm.TrainVar(self.rng.uniform(-bound, bound, num_output))

        # variables
        self.h = bm.Variable(bm.zeros((num_batch, num_hidden)))
        self.o = bm.Variable(bm.zeros((num_batch, num_output)))
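
The mask built above enforces Dale's law: excitatory units get +1 entries in their column, inhibitory units -1, and the diagonal is zeroed so units do not connect to themselves. A numpy-only illustration of the construction:

    import numpy as np

    num_hidden, e_size = 10, 8   # e_ratio = 0.8
    mask = np.tile([1] * e_size + [-1] * (num_hidden - e_size), (num_hidden, 1))
    np.fill_diagonal(mask, 0)
    print(mask[0])               # [ 0  1  1  1  1  1  1  1 -1 -1]
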
Example #5
    def __init__(self,
                 num_hidden,
                 num_input,
                 num_batch,
                 h=Uniform(),
                 w=XavierNormal(),
                 b=ZeroInit(),
                 **kwargs):
        super(VanillaRNN, self).__init__(num_hidden, num_input, **kwargs)

        # variables
        self.h = bm.Variable(self.get_param(h, (num_batch, self.num_hidden)))

        # weights
        ws = self.get_param(w, (num_input + num_hidden, num_hidden))
        self.w_ir = bm.TrainVar(ws[:num_input])  # input-to-hidden block
        self.w_rr = bm.TrainVar(ws[num_input:])  # hidden-to-hidden block
        self.b = self.get_param(b, (num_hidden, ))
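
The update rule is not part of this snippet; under the usual vanilla-RNN reading it would combine the two slices as sketched below (an assumption, since only __init__ is shown):

    import brainpy.math as bm

    num_input, num_hidden, num_batch = 3, 5, 2
    ws = bm.random.randn(num_input + num_hidden, num_hidden)
    w_ir, w_rr = ws[:num_input], ws[num_input:]  # same split as above
    x = bm.random.randn(num_batch, num_input)
    h = bm.zeros((num_batch, num_hidden))
    h = bm.tanh(x @ w_ir + h @ w_rr)             # one recurrence step
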
Example #6
    def __init__(self,
                 num_input,
                 num_hidden,
                 num_output,
                 num_batch,
                 dt=None,
                 seed=None,
                 w_ir=bp.init.KaimingNormal(scale=1.),
                 w_rr=bp.init.KaimingNormal(scale=1.),
                 w_ro=bp.init.KaimingNormal(scale=1.)):
        super(RNN, self).__init__()

        # parameters
        self.tau = 100
        self.num_batch = num_batch
        self.num_input = num_input
        self.num_hidden = num_hidden
        self.num_output = num_output
        if dt is None:
            self.alpha = 1
        else:
            self.alpha = dt / self.tau
        self.rng = bm.random.RandomState(seed=seed)

        # input weight
        self.w_ir = self.get_param(w_ir, (num_input, num_hidden))

        # recurrent weight
        bound = 1 / num_hidden**0.5
        self.w_rr = self.get_param(w_rr, (num_hidden, num_hidden))
        self.b_rr = bm.TrainVar(self.rng.uniform(-bound, bound, num_hidden))

        # readout weight
        self.w_ro = self.get_param(w_ro, (num_hidden, num_output))
        self.b_ro = bm.TrainVar(self.rng.uniform(-bound, bound, num_output))

        # variables
        self.h = bm.Variable(bm.zeros((num_batch, num_hidden)))
        self.o = bm.Variable(bm.zeros((num_batch, num_output)))
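
Here alpha = dt / tau sets the leak: each step keeps a (1 - alpha) fraction of the old hidden state, and when dt is not given the state is fully replaced (alpha = 1). A hedged sketch of the leaky update this parameterizes (the forward pass itself is not shown in the snippet):

    import brainpy.math as bm

    alpha = 10. / 100.             # dt = 10, tau = 100 -> alpha = 0.1
    h = bm.zeros((1, 8))
    drive = bm.random.randn(1, 8)  # stands in for x @ w_ir + h @ w_rr + b_rr
    h = (1 - alpha) * h + alpha * bm.tanh(drive)
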
Example #7
    def __init__(self):
        super(Test, self).__init__()

        self.a = bm.TrainVar(bm.ones(10))
        self.b = bm.TrainVar(bm.random.randn(10))
        self.c = bm.TrainVar(bm.random.uniform(size=10))
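
To run this last snippet the class needs a base that can collect its variables; bp.Base is an assumption here, since the class header is not shown. Once wrapped, the three TrainVars can be gathered for an optimizer in one call:

    import brainpy as bp
    import brainpy.math as bm

    class Test(bp.Base):  # assumed base class, not shown in the original snippet
        def __init__(self):
            super(Test, self).__init__()
            self.a = bm.TrainVar(bm.ones(10))
            self.b = bm.TrainVar(bm.random.randn(10))
            self.c = bm.TrainVar(bm.random.uniform(size=10))

    print(list(Test().train_vars().keys()))  # collects a, b and c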