Example #1
    def __init__(self, dim, w_param=None, **kwargs):

        super(RNNLM, self).__init__(name=generate_name('hidden', 1),
                                    user_type='kHidden')
        if w_param is None:
            # default: uniform
            w_param = Parameter(name=generate_name('w'), init='uniform')
        else:
            set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)
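
This fragment shows the convention used by all of the constructors below: if no Parameter is supplied, the layer builds a default one; otherwise the caller's Parameter is adjusted through set_param_field with the **kwargs. A minimal caller-side sketch, assuming the class and Parameter above are in scope and with illustrative values:

# Default path: the layer creates a uniform-initialized weight itself.
hidden = RNNLM(dim=100)

# Explicit path: pass a Parameter, plus multipliers such as w_lr/w_wd
# (documented in the docstrings of the later examples).
w = Parameter(name='hidden_w', init='gaussian')
hidden = RNNLM(dim=100, w_param=w, w_lr=2.0, w_wd=1.0)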
Example #2
    def __init__(self, in_dim, out_dim, w_param=None, **kwargs):

        super(Embedding, self).__init__(name=generate_name('embedding', 1),
                                        user_type='kEmbedding')
        fields = {'vocab_size': in_dim, 'word_dim': out_dim}
        setval(self.layer.Extensions[embedding_conf], **fields)
        if w_param is None:
            # default: uniform
            w_param = Parameter(name=generate_name('w'), init='uniform')
        else:
            set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)
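
A usage sketch for the Embedding constructor above, assuming the class and Parameter are in scope; the vocabulary size and word dimension are illustrative:

# in_dim is the vocabulary size, out_dim the word-vector dimension.
embed = Embedding(in_dim=10000, out_dim=128)

# With an explicit weight Parameter and a learning-rate multiplier:
w = Parameter(name='embed_w', init='uniform')
embed = Embedding(in_dim=10000, out_dim=128, w_param=w, w_lr=2.0)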
Example #3
    def __init__(self, nb_filter=0, kernel=0, stride=1, pad=0,
                 init=None, w_param=None, b_param=None,
                 activation=None, **kwargs):
        '''
        required
          nb_filter = (int)  // the number of filters
          kernel    = (int)  // the size of filter
        optional
          stride    = (int)  // the size of stride
          pad       = (int)  // the size of padding
          init      = (string)     // 'uniform', 'gaussian', 'constant'
          w_param   = (Parameter)  // Parameter object for weight
          b_param   = (Parameter)  // Parameter object for bias
          **kwargs (KEY=VALUE)
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias 
            b_wd = (float) // weight decay multiplier for bias
        '''

        assert nb_filter > 0 and kernel > 0, 'nb_filter and kernel should be positive ints'
        super(Convolution2D, self).__init__(name=generate_name('conv', 1),
                                            type=kCConvolution)
        fields = {'num_filters' : nb_filter,
                  'kernel' : kernel,
                  'stride' : stride,
                  'pad' : pad}
        setval(self.layer.convolution_conf, **fields)

        # parameter w
        if w_param is None:
            self.init = 'gaussian' if init is None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param is None:
            self.init = 'constant' if init is None else init
            b_param = Parameter(init=self.init) # default: constant
        set_param_field(b_param.param, 'b', True, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer
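
A usage sketch for this constructor, assuming the class is in scope; the filter geometry, the multiplier values, and 'relu' as an activation string are illustrative assumptions:

# 64 5x5 filters, stride 1, padding 2, followed by an activation layer.
conv = Convolution2D(nb_filter=64, kernel=5, stride=1, pad=2,
                     init='gaussian', activation='relu')

# Scaling the bias learning rate and disabling its weight decay
# via the b_lr/b_wd kwargs documented above.
conv = Convolution2D(nb_filter=64, kernel=5, b_lr=2.0, b_wd=0.0)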
Example #4
    def __init__(self,
                 output_dim=0,
                 activation=None,
                 init=None,
                 w_param=None,
                 b_param=None,
                 input_dim=None,
                 **kwargs):
        '''
        required
          output_dim = (int)
        optional
          activation = (string)
          init       = (string)     // 'uniform', 'gaussian', 'constant'
          w_param    = (Parameter)  // Parameter object for weight
          b_param    = (Parameter)  // Parameter object for bias
          **kwargs
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias 
            b_wd = (float) // weight decay multiplier for bias
        '''
        # required
        assert output_dim > 0, 'output_dim should be set'
        super(Dense, self).__init__(type=kInnerProduct, **kwargs)
        self.layer.innerproduct_conf.num_output = output_dim
        if 'transpose' in kwargs:
            self.layer.innerproduct_conf.transpose = kwargs['transpose']

        # parameter w (default: gaussian)
        if w_param is None:
            self.init = 'gaussian' if init is None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', False, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b (default: constant)
        if b_param is None:
            self.init = 'constant' if init is None else init
            b_param = Parameter(init=self.init)
        set_param_field(b_param.param, 'b', False, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer
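
A usage sketch for Dense, assuming the class and Parameter are in scope; 'tanh' as an activation string is an illustrative assumption, and the Parameter arguments are limited to those appearing elsewhere in these examples (name, init, wd):

# A 10-way fully connected layer with gaussian-initialized weights.
fc = Dense(output_dim=10, init='gaussian', activation='tanh')

# Supplying Parameter objects directly instead of an init string.
w = Parameter(name='fc_w', init='gaussian')
b = Parameter(name='fc_b', init='constant', wd=0)
fc = Dense(output_dim=10, w_param=w, b_param=b)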
Example #5
    def __init__(self, output_dim=0, activation=None,
                 init=None, w_param=None, b_param=None, input_dim=None,
                 **kwargs):
        '''
        required
          output_dim = (int)
        optional
          activation = (string)
          init       = (string)     // 'uniform', 'gaussian', 'constant'
          w_param    = (Parameter)  // Parameter object for weight
          b_param    = (Parameter)  // Parameter object for bias
          **kwargs
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias 
            b_wd = (float) // weight decay multiplier for bias
        '''
        # required
        assert output_dim > 0, 'output_dim should be set'
        super(Dense, self).__init__(type=kInnerProduct, **kwargs)
        self.layer.innerproduct_conf.num_output = output_dim
        if 'transpose' in kwargs:
            self.layer.innerproduct_conf.transpose = kwargs['transpose']

        # parameter w (default: gaussian)
        if w_param is None:
            self.init = 'gaussian' if init is None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', False, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b (default: constant)
        if b_param is None:
            self.init = 'constant' if init is None else init
            b_param = Parameter(init=self.init)
        set_param_field(b_param.param, 'b', False, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer
Example #6
    def __init__(self, out_dim=None, w_param=None, b_param=None,
                 sampling=None, **kwargs):
        '''
        Generate a set of layers (like MLP) according to the number of elements
          in out_dim, and on top of it, two layers RBMVis and RBMHid with
          bidirectional connection
        The layers are expanded and added in Energy.add()

        required
          out_dim  = (int) or (int list) // the number of hidden nodes
        optional
          w_param  = (Parameter)  // Parameter object for weight
          b_param  = (Parameter)  // Parameter object for bias
          sampling = (string)
        '''

        assert out_dim is not None, 'out_dim should be set'
        self.out_dim = [out_dim] if isinstance(out_dim, int) else out_dim

        self.name = kwargs['name'] if 'name' in kwargs else 'RBMVis'
        self.layer_type = kwargs['type'] if 'type' in kwargs else kRBMVis
        super(RBM, self).__init__(name=generate_name(self.name,
                                  withnumber=False), type=self.layer_type)
        setval(self.layer.rbm_conf, hdim=self.out_dim[-1])
        if self.layer_type == kRBMHid and sampling is not None:
            if sampling == 'gaussian':
                setval(self.layer.rbm_conf, gaussian=True)

        # parameter w
        if w_param is None:
            w_param = Parameter(init='gaussian', **kwargs)
            set_param_field(w_param.param, 'w', withnumber=False,
                            level=len(self.out_dim), **kwargs)
        else:
            if self.layer_type == kRBMHid:
                del kwargs['name']
            else:
                set_param_field(w_param.param, 'w', withnumber=False,
                                level=len(self.out_dim), **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param is None:
            b_param = Parameter(init='constant', **kwargs)
            set_param_field(b_param.param, 'b', withnumber=False,
                            level=len(self.out_dim), **kwargs)
        else:
            if self.layer_type == kRBMHid:
                pass
            else:
                set_param_field(b_param.param, 'b', withnumber=False,
                                level=len(self.out_dim), **kwargs)
        setval(self.layer, param=b_param.param)

        if self.layer_type == kRBMVis:
            wname = w_param.param.name
            parw = Parameter(name=wname+"_", init='none', share_from=wname)
            bname = b_param.param.name
            parb = Parameter(name=bname+"2", wd=0, init='constant')
            self.bidirect = RBM(self.out_dim, name='RBMHid', type=kRBMHid,
                         w_param=parw, b_param=parb, sampling=sampling).layer
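
A usage sketch matching the docstring above: out_dim may be a single int or a list, and a list is expanded into a stack of layers by Energy.add(). Per the code, sampling only takes effect for the hidden (kRBMHid) side, which the constructor wires up itself. Values are illustrative:

# One RBM with 256 hidden units.
rbm = RBM(out_dim=256)

# A stack of RBMs; `sampling` is forwarded to the kRBMHid layer.
rbm = RBM(out_dim=[500, 250, 30], sampling='gaussian')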
Example #7
    def __init__(self,
                 nb_filter=0,
                 kernel=0,
                 stride=1,
                 pad=0,
                 init=None,
                 w_param=None,
                 b_param=None,
                 activation=None,
                 **kwargs):
        '''
        required
          nb_filter = (int)        // the number of filters
          kernel    = (int/tuple)  // the size of filter
        optional
          stride    = (int/tuple)  // the size of stride
          pad       = (int/tuple)  // the size of padding
          init      = (string)     // 'uniform', 'gaussian', 'constant'
          w_param   = (Parameter)  // Parameter object for weight
          b_param   = (Parameter)  // Parameter object for bias
          **kwargs (KEY=VALUE)
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias 
            b_wd = (float) // weight decay multiplier for bias
        '''

        assert nb_filter > 0, 'nb_filter should be set as positive int'
        super(Convolution2D, self).__init__(name=generate_name('conv', 1),
                                            type=kCConvolution)
        fields = {'num_filters': nb_filter}
        # for kernel
        if isinstance(kernel, int):
            fields['kernel'] = kernel
        else:
            fields['kernel_x'] = kernel[0]
            fields['kernel_y'] = kernel[1]
        # for stride
        if isinstance(stride, int):
            fields['stride'] = stride
        else:
            fields['stride_x'] = stride[0]
            fields['stride_y'] = stride[1]
        # for pad
        if isinstance(pad, int):
            fields['pad'] = pad
        else:
            fields['pad_x'] = pad[0]
            fields['pad_y'] = pad[1]

        setval(self.layer.convolution_conf, **fields)

        # parameter w
        if w_param is None:
            self.init = 'gaussian' if init is None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param is None:
            self.init = 'constant' if init is None else init
            b_param = Parameter(init=self.init)  # default: constant
        set_param_field(b_param.param, 'b', True, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer
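
The int/tuple branches above allow rectangular filter geometry. A short sketch with assumed values:

# Square setting: a single int applies to both axes.
conv = Convolution2D(nb_filter=32, kernel=3, stride=1, pad=1)

# Rectangular setting: (x, y) tuples map to the *_x / *_y conf fields.
conv = Convolution2D(nb_filter=32, kernel=(3, 5), stride=(1, 2), pad=(1, 2))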
Example #8
    def __init__(self,
                 out_dim=None,
                 w_param=None,
                 b_param=None,
                 sampling=None,
                 **kwargs):
        '''
        Generate a set of layers (like MLP) according to the number of elements
          in out_dim, and on top of it, two layers RBMVis and RBMHid with
          bidirectional connection
        The layers are expanded and added in Energy.add()

        required
          out_dim  = (int) or (int list) // the number of hidden nodes
        optional
          w_param  = (Parameter)  // Parameter object for weight
          b_param  = (Parameter)  // Parameter object for bias
          sampling = (string)
        '''

        assert out_dim is not None, 'out_dim should be set'
        self.out_dim = [out_dim] if isinstance(out_dim, int) else out_dim

        self.name = kwargs['name'] if 'name' in kwargs else 'RBMVis'
        self.layer_type = kwargs['type'] if 'type' in kwargs else kRBMVis
        super(RBM, self).__init__(name=generate_name(self.name,
                                                     withnumber=False),
                                  type=self.layer_type)
        setval(self.layer.rbm_conf, hdim=self.out_dim[-1])
        if self.layer_type == kRBMHid and sampling is not None:
            if sampling == 'gaussian':
                setval(self.layer.rbm_conf, gaussian=True)

        # parameter w
        if w_param is None:
            w_param = Parameter(init='gaussian', **kwargs)
            set_param_field(w_param.param,
                            'w',
                            withnumber=False,
                            level=len(self.out_dim),
                            **kwargs)
        else:
            if self.layer_type == kRBMHid:
                del kwargs['name']
            else:
                set_param_field(w_param.param,
                                'w',
                                withnumber=False,
                                level=len(self.out_dim),
                                **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param is None:
            b_param = Parameter(init='constant', **kwargs)
            set_param_field(b_param.param,
                            'b',
                            withnumber=False,
                            level=len(self.out_dim),
                            **kwargs)
        else:
            if self.layer_type == kRBMHid:
                pass
            else:
                set_param_field(b_param.param,
                                'b',
                                withnumber=False,
                                level=len(self.out_dim),
                                **kwargs)
        setval(self.layer, param=b_param.param)

        if self.layer_type == kRBMVis:
            wname = w_param.param.name
            parw = Parameter(name=wname + "_", init='none', share_from=wname)
            bname = b_param.param.name
            parb = Parameter(name=bname + "2", wd=0, init='constant')
            self.bidirect = RBM(self.out_dim,
                                name='RBMHid',
                                type=kRBMHid,
                                w_param=parw,
                                b_param=parb,
                                sampling=sampling).layer
Example #9
    def __init__(self, nb_filter=0, kernel=0, stride=1, pad=0,
                 init=None, w_param=None, b_param=None,
                 activation=None, **kwargs):
        '''
        required
          nb_filter = (int)        // the number of filters
          kernel    = (int/tuple)  // the size of filter
        optional
          stride    = (int/tuple)  // the size of stride
          pad       = (int/tuple)  // the size of padding
          init      = (string)     // 'uniform', 'gaussian', 'constant'
          w_param   = (Parameter)  // Parameter object for weight
          b_param   = (Parameter)  // Parameter object for bias
          **kwargs (KEY=VALUE)
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias
            b_wd = (float) // weight decay multiplier for bias
        '''

        assert nb_filter > 0, 'nb_filter should be set as positive int'
        super(Convolution2D, self).__init__(name=generate_name('conv', 1),
                                            type=kCConvolution, **kwargs)
        fields = {"num_filters":nb_filter}
        # for kernel
        if isinstance(kernel, int):
            fields['kernel'] = kernel
        else:
            fields['kernel_x'] = kernel[0]
            fields['kernel_y'] = kernel[1]
        # for stride
        if isinstance(stride, int):
            fields['stride'] = stride
        else:
            fields['stride_x'] = stride[0]
            fields['stride_y'] = stride[1]
        # for pad
        if isinstance(pad, int):
            fields['pad'] = pad
        else:
            fields['pad_x'] = pad[0]
            fields['pad_y'] = pad[1]

        setval(self.layer.convolution_conf, **fields)

        # parameter w
        if w_param is None:
            self.init = 'gaussian' if init is None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param is None:
            self.init = 'constant' if init is None else init
            b_param = Parameter(init=self.init) # default: constant
        set_param_field(b_param.param, 'b', True, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer