def __init__(self, in_size, out_size, bias=0, nobias=False,
             initialW=None, initial_bias=None, normalized=False):
    super(SequenceLinear, self).__init__(W=(in_size, out_size))
    # Initialize the weight matrix, falling back to a uniform initializer.
    if initialW is None:
        initialW = weight_init.uniform((in_size, out_size))
    self.W.data[...] = initialW
    if normalized:
        # Layer-normalization scale (gamma) and shift (beta) over the outputs.
        self.add_param('gamma', (out_size,))
        self.add_param('beta', (out_size,))
        self.gamma.data[...] = numpy.ones((out_size,), dtype=numpy.float32)
        self.beta.data[...] = numpy.zeros((out_size,), dtype=numpy.float32)
        # beta provides a learnable shift, so the plain additive bias is disabled.
        nobias = True
    if nobias:
        self.b = None
    else:
        self.add_param('b', out_size)
        if initial_bias is None:
            initial_bias = bias
        self.b.data[...] = initial_bias
    self.add_persistent('normalized', normalized)
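# NOTE: illustrative sketch only. The `normalized` branch above registers a
# per-unit scale (`gamma`) and shift (`beta`); the commented code below shows
# how such parameters are conventionally applied, layer-normalization style,
# to a pre-activation `h` of shape (batch, out_size). It is an assumption for
# clarity, not the actual forward pass of SequenceLinear.
#
#     mean = h.mean(axis=1, keepdims=True)
#     std = h.std(axis=1, keepdims=True)
#     h = self.gamma.data * (h - mean) / (std + 1e-5) + self.beta.data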
def __init__(self, in_size, out_size, bias=0, nobias=False,
             W_x=None, W_h=None, initial_bias=None, reverse=False,
             normalized=False, stateful=False):
    super(SequenceLSTM, self).__init__(W_x=(in_size, 4 * out_size),
                                       W_h=(out_size, 4 * out_size))
    # Input-to-hidden and hidden-to-hidden weights; each packs the four LSTM
    # gates along the last axis, hence the factor of 4.
    if W_x is None:
        W_x = weight_init.uniform((in_size, 4 * out_size), low=-0.04, high=0.04)
    if W_h is None:
        W_h = weight_init.uniform((out_size, 4 * out_size), low=-0.04, high=0.04)
    self.W_x.data[...] = W_x
    self.W_h.data[...] = W_h
    if normalized:
        # Layer-normalization scale (gamma) and shift (beta) over the packed
        # gate pre-activations.
        self.add_param('gamma', (4 * out_size,))
        self.add_param('beta', (4 * out_size,))
        self.gamma.data[...] = numpy.ones((4 * out_size,), dtype=numpy.float32)
        self.beta.data[...] = numpy.zeros((4 * out_size,), dtype=numpy.float32)
        # beta provides a learnable shift, so the plain additive bias is disabled.
        nobias = True
    if nobias:
        self.b = None
    else:
        self.add_param('b', 4 * out_size)
        if initial_bias is None:
            initial_bias = bias
        self.b.data[...] = initial_bias
    self.add_persistent('reverse', reverse)
    self.add_persistent('normalized', normalized)
    self.add_persistent('stateful', stateful)
    self.reset_states()
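# NOTE: illustrative sketch only. `W_x` and `W_h` above map onto a packed
# pre-activation of width 4 * out_size, i.e. the four LSTM gates stacked along
# the last axis. A conventional step over one time slice `x_t` (batch, in_size)
# with previous states `c_prev` / `h_prev` would look roughly like the commented
# code below, where `sigmoid` and `tanh` are the usual elementwise
# nonlinearities; the exact gate ordering and the handling of `reverse` /
# `stateful` depend on this class's forward pass, which is not shown here.
#
#     a = x_t.dot(W_x) + h_prev.dot(W_h) + b          # (batch, 4 * out_size)
#     i, f, g, o = numpy.split(a, 4, axis=1)          # gate pre-activations
#     c = sigmoid(f) * c_prev + sigmoid(i) * tanh(g)  # new cell state
#     h = sigmoid(o) * tanh(c)                        # new hidden state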