Example 1
    def __init__(self,
                 kernel_size,
                 kernel_number,
                 input_size,
                 pad=0,
                 causal=True,
                 dilation=1,
                 weight_init=Uniform(),
                 name="",
                 keepdims=False,
                 stride=1):
        """1-D convolution layer: builds the W/b shared variables and a
        causality mask.

        The mask has ``kernel_size // 2`` zeros followed by
        ``kernel_size // 2 + 1`` ones, so when ``causal`` is True the
        kernel's "future" taps are zeroed out.
        """
        w_shape = kernel_number, input_size, kernel_size, 1
        b_shape = kernel_number

        self.causal = causal
        self.dilation = dilation
        self.stride = stride
        # A 1-tap kernel cannot see the future, so causality is irrelevant.
        if kernel_size == 1:
            self.causal = False

        # Use // so the arithmetic is identical under Python 2 and 3;
        # the original relied on Py2's integer `/` and Py2-only `xrange`.
        half = kernel_size // 2
        mask = [0] * half + [1] * (half + 1)

        mask = numpy.asarray(mask, dtype=theano.config.floatX)
        self.mask = theano.shared(mask)

        self.w = theano.shared(weight_init(w_shape), name="%s_W" % name)
        self.b = theano.shared(weight_init(b_shape), name="%s_b" % name)

        self.params = [self.w, self.b]
        self.keepdims = keepdims
        self.pad = pad
Example 2
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 causal=False,
                 pad=0,
                 weight_init=Uniform(),
                 name="",
                 output_sz=-1):
        """1-D (transposed-style) convolution layer with an optional
        causality mask.

        Note the mask is ones-first here (``filter_size // 2 + 1`` ones then
        ``filter_size // 2`` zeros) — the reverse of the forward-conv layer.
        """
        self.num_filters = num_filters
        self.filter_size = filter_size
        self.stride = stride
        self.pad = pad
        self.causal = causal
        self.output_sz = output_sz

        # Use // so the arithmetic is identical under Python 2 and 3;
        # the original relied on Py2's integer `/` and Py2-only `xrange`.
        half = filter_size // 2
        mask = [1] * (half + 1) + [0] * half
        mask = numpy.asarray(mask, dtype=theano.config.floatX)
        self.mask = theano.shared(mask)

        w_shape = (num_channels, self.num_filters, self.filter_size, 1)
        self.W = theano.shared(weight_init(w_shape), name=name + "_W")
        self.b = theano.shared(weight_init(num_filters), name=name + "_b")

        self.params = [self.W, self.b]
Example 3
    def __init__(self,
                 kernel_size,
                 input_size,
                 causal=True,
                 dilation=1,
                 weight_init=Uniform(),
                 name="",
                 keepdims=False,
                 p=0.0):
        """Dilated 3-tap convolution followed by batch norm, with dropout
        kept as a separate sub-layer.

        Only ``kernel_size == 3`` is supported.
        """
        from nn.normalization import LayerNormalization

        assert kernel_size == 3

        # Convolution widens the channel count to 3x the input size.
        conv_layer = Convolution1d(kernel_size,
                                   input_size * 3,
                                   input_size,
                                   pad=dilation,
                                   causal=causal,
                                   dilation=dilation,
                                   weight_init=weight_init,
                                   name=name,
                                   keepdims=keepdims)
        bn_layer = BatchNormalization(input_size * 3, name=name + "_bn")
        self.conv = Sequential([conv_layer, bn_layer])

        self.dropout = Dropout(p)
        self.input_size = input_size
        self.params = self.conv.params
Example 4
 def __init__(self,
              input_size,
              output_size,
              weight_init=Uniform(),
              learnable=True,
              name=""):
     """Projection/embedding matrix of shape (input_size, output_size).

     When ``learnable`` is False the matrix is frozen by exposing an
     empty parameter list.
     """
     self.W = theano.shared(weight_init((input_size, output_size)),
                            name=name + "_W")
     self.params = [self.W] if learnable else []
Example 5
    def __init__(self,
                 input_size,
                 output_size,
                 weight_init=Uniform(),
                 name="",
                 biases=True):
        """Fully-connected layer; the bias vector is optional.

        ``self.b`` stays None when ``biases`` is False.
        """
        self.W = theano.shared(weight_init((input_size, output_size)),
                               name="%s_W" % name)
        if biases:
            self.b = theano.shared(weight_init(output_size),
                                   name="%s_b" % name)
            self.params = [self.W, self.b]
        else:
            self.b = None
            self.params = [self.W]
Example 6
 def _set_param(shape, init_type=None, name=None):
     """Create a parameter of ``shape`` using the named initializer.

     Any unrecognized (or missing) ``init_type`` falls back to Normal(),
     matching the original if/elif chain's else branch.
     """
     initializers = {
         'zero': Zero,
         'one': One,
         'xavier': Xavier,
         'orth': Orthonormal,
         'identity': Identity,
         'uniform': Uniform,
     }
     init = initializers.get(init_type, Normal)()
     return init(shape=shape, name=name)
Example 7
    def __init__(self, input_size, layer_size, batch_size, p=0.0,
                 name="", activation=T.tanh, weight_init=Uniform(), persistent=False):
        """Simple recurrent cell: linear + layer norm + activation + dropout
        over the concatenated [input, hidden] vector.
        """
        # Zero-initialized hidden state, one row per batch element.
        init_h = numpy.zeros((batch_size, layer_size), dtype=theano.config.floatX)
        self.h = theano.shared(init_h, name=name + "_h_init")

        layers = [
            Linear(input_size + layer_size, layer_size,
                   weight_init=weight_init, name=name + "_fc"),
            LayerNormalization(layer_size, name=name + "_ln"),
            activation,
            Dropout(p),
        ]
        self.preact = Sequential(layers)
        self.params = self.preact.params

        self.persistent = persistent
        self.input_size = input_size
        self.layer_size = layer_size
        self.batch_size = batch_size
        self.activation = activation
Example 8
    def __init__(self,
                 kernel_size,
                 kernel_number,
                 input_size,
                 causal=True,
                 dilation=1,
                 weight_init=Uniform(),
                 name="",
                 keepdims=False):
        """Gated dilated convolution block: conv -> BN -> gate -> 1x1 proj.

        Requires an even ``kernel_number`` (the gate splits channels in
        half) and ``kernel_size == 3``.
        """
        assert kernel_number % 2 == 0
        assert kernel_size == 3

        self.conv = Sequential([
            Convolution1d(kernel_size,
                          kernel_number,
                          input_size,
                          pad=dilation,
                          causal=causal,
                          dilation=dilation,
                          weight_init=weight_init,
                          name=name,
                          keepdims=keepdims),
            BatchNormalization(kernel_number, collect=False,
                               name=name + "_bn"),
            Gated(),
            # The gate halves the channel count, so the 1x1 projection sees
            # kernel_number // 2 inputs.  Floor division keeps this an int
            # under Python 3; the original `/` would yield a float there.
            Convolution1d(1,
                          input_size,
                          kernel_number // 2,
                          pad=0,
                          causal=causal,
                          keepdims=keepdims,
                          weight_init=weight_init,
                          name=name + "_1x1"),
        ])
        self.params = self.conv.params
Example 9
    def __init__(self, input_size, layer_size, batch_size=1, name="", p=0.0, weight_init=Uniform(),
                 inner_activation=Sigmoid(), activation=Tanh(), persistent=False):
        """GRU-style cell: a combined reset/update gate stack (``rz``) and a
        candidate-state stack (``g``), both layer-normalized.
        """
        self.persistent = persistent
        self.batch_size = batch_size
        self.layer_size = layer_size
        self.inner_activation = inner_activation
        self.activation = activation

        # Zero-initialized hidden state, one row per batch element.
        init_h = numpy.zeros((batch_size, layer_size), dtype=theano.config.floatX)
        self.h = theano.shared(init_h, name=name + "_h_init")

        # Reset and update gates are produced together (hence layer_size * 2).
        rz_layers = [
            Linear(input_size + layer_size, layer_size * 2,
                   weight_init=weight_init, name=name + "_r"),
            LayerNormalization(layer_size * 2, name=name + "_ln_r"),
            inner_activation,
        ]
        self.rz = Sequential(rz_layers)

        g_layers = [
            Linear(input_size + layer_size, layer_size,
                   weight_init=weight_init, name=name + "_g"),
            LayerNormalization(layer_size, name=name + "_ln_g"),
            activation,
            Dropout(p),
        ]
        self.g = Sequential(g_layers)

        self.params = self.rz.params + self.g.params
Example 10
    def __init__(self, input_size, layer_size, batch_size=1, p=0.0,
                 name="", activation=T.tanh, inner_activation=T.nnet.sigmoid, weight_init=Uniform(), persistent=False):
        """LSTM cell: one fused linear layer producing all four gates
        (i, f, o, g), followed by layer normalization.

        Dropout is applied separately via ``self.dropout``.
        """
        # Zero-initialized hidden and cell states, one row per batch element.
        self.h = theano.shared(numpy.zeros((batch_size, layer_size), dtype=theano.config.floatX), name=name+"_h_init")
        self.c = theano.shared(numpy.zeros((batch_size, layer_size), dtype=theano.config.floatX), name=name+"_c_init")

        # The four gates are computed in one matmul, hence layer_size * 4.
        self.preact = Sequential([
            Linear(input_size+layer_size, layer_size * 4, weight_init=weight_init, name=name+"_ifog"),
            LayerNormalization(layer_size * 4, name=name + "_ln")
        ])
        # (The original also set self.params = [] first; that assignment was
        # dead code, immediately overwritten here.)
        self.params = self.preact.params

        self.dropout = Dropout(p)

        self.updates = []
        self.activation = activation
        self.inner_activation = inner_activation
        self.batch_size = batch_size
        self.layer_size = layer_size
        self.persistent = persistent