def __init__(self, dim, w_param=None, **kwargs):

        super(RNNLM, self).__init__(name=generate_name('hidden', 1),
                                    user_type='kHidden')
        if w_param == None:
            # default: uniform
            w_param = Parameter(name=generate_name('w'), init='uniform')
        else:
            set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)
    def __init__(self, **kwargs):

        super(UserLossRNNLM, self).__init__(name=generate_name('loss', 1),
                                            user_type='kLoss')
        self.layer.Extensions[loss_conf].nclass = kwargs['nclass']
        self.layer.Extensions[loss_conf].vocab_size = kwargs['vocab_size']
        setval(self.layer, param=Parameter(name=generate_name('w'),
                                           init='uniform', scale=0.3).param)
        setval(self.layer, param=Parameter(name=generate_name('w', 1),
                                           init='uniform', scale=0.3).param)
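A minimal construction sketch for the loss layer above; the class name and its two keyword arguments come straight from the code, and the values are illustrative.

loss = UserLossRNNLM(nclass=100, vocab_size=10000)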
Example #5
    def __init__(self, in_dim, out_dim, w_param=None, **kwargs):

        super(Embedding, self).__init__(name=generate_name('embedding', 1),
                                        user_type='kEmbedding')
        fields = {'vocab_size': in_dim, 'word_dim': out_dim}
        setval(self.layer.Extensions[embedding_conf], **fields)
        if w_param == None:
            # default: uniform
            w_param = Parameter(name=generate_name('w'), init='uniform')
        else:
            set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)
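Given the constructor above, a hedged usage sketch (values illustrative): in_dim is the vocabulary size and out_dim the word-embedding dimension.

embed = Embedding(in_dim=10000, out_dim=128)   # vocab_size=10000, word_dim=128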
Example #7
    def __init__(self, pool_size=None, stride=1, ignore_border=True, **kwargs):
        '''
        required
          pool_size     = (int|tuple) // size for pooling
        optional
          stride        = (int)       // size of striding
          ignore_border = (bool)      // flag for padding
          **kwargs                    // fields for Layer class
        '''

        assert pool_size != None, 'pool_size is required'
        if type(pool_size) == int:
            pool_size = (pool_size, pool_size)
        assert type(pool_size) == tuple and pool_size[0] == pool_size[1], \
               'currently pool size should be square in Singa'
        super(AvgPooling2D, self).__init__(name=generate_name('pool'),
                                           type=kCPooling,
                                           **kwargs)
        self.layer.pooling_conf.pool = PoolingProto().AVG
        fields = {
            'pool': PoolingProto().AVG,
            'kernel': pool_size[0],
            'stride': stride,
            'pad': 0 if ignore_border else 1
        }
        setval(self.layer.pooling_conf, **fields)
    def __init__(self, pool_size=None,
                 stride=1, ignore_border=True, **kwargs):
        '''
        Max Pooling layer

        required
          pool_size     = (int|tuple) // the size for pooling
        optional
          stride        = (int)       // the size of striding
          ignore_border = (bool)      // flag for padding
          **kwargs                    // fields for Layer class
        '''

        assert pool_size != None, 'pool_size is required'
        if type(pool_size) == int:
            pool_size = (pool_size, pool_size)
        assert type(pool_size) == tuple and pool_size[0] == pool_size[1], \
               'pool size should be square in Singa'
        super(MaxPooling2D, self).__init__(name=generate_name('pool'),
                                           type=kCPooling, **kwargs)
        fields = {'pool' : PoolingProto().MAX,
                  'kernel' : pool_size[0],
                  'stride' : stride,
                  'pad' : 0 if ignore_border else 1}
        setval(self.layer.pooling_conf, **fields)
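Both pooling constructors accept an int or a square tuple for pool_size; a usage sketch with illustrative sizes:

avg_pool = AvgPooling2D(pool_size=2)                 # expands to (2, 2)
max_pool = MaxPooling2D(pool_size=(3, 3), stride=2,
                        ignore_border=False)         # ignore_border=False sets pad=1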
Example #9
    def __init__(self):
        '''
        '''

        self.name = 'accuracy'
        self.layer_type = enumLayerType(self.name)
        super(Accuracy, self).__init__(name=generate_name(self.name),
                                       type=self.layer_type)
def set_param_field(param, pname, changename=False, withnumber=True, **kwargs):
    '''
      param      = (ParamProto)
      pname      = (string)     // 'w' for weight, or 'b' for bias
      changename = (bool)       // update parameter name if True
      withnumber = (bool)       // add layer number if True
      **kwargs
        w_lr = (float) // learning rate multiplier for weight, used to
                       // scale the learning rate when updating parameters.
        w_wd = (float) // weight decay multiplier for weight, used to
                       // scale the weight decay when updating parameters.
        b_lr = (float) // learning rate multiplier for bias 
        b_wd = (float) // weight decay multiplier for bias
    '''
    assert pname == 'w' or pname == 'b', 'pname should be w or b'

    lr_ = param.lr_scale
    wd_ = param.wd_scale
    initkv = {}

    if pname == 'w':
        if 'w_lr' in kwargs:
            lr_ = kwargs['w_lr']
            del kwargs['w_lr']
        if 'w_wd' in kwargs:
            wd_ = kwargs['w_wd']
            del kwargs['w_wd']
        for key, val in kwargs.items():
            if key.startswith('w_'):
                initkv[key[2:]] = val

    elif pname == 'b':
        if 'b_lr' in kwargs:
            lr_ = kwargs['b_lr']
            del kwargs['b_lr']
        if 'b_wd' in kwargs:
            wd_ = kwargs['b_wd']
            del kwargs['b_wd']
        for key, val in kwargs.items():
            if key.startswith('b_'):
                initkv[key[2:]] = val

    field = {'lr_scale': lr_, 'wd_scale': wd_}

    # Set/update parameter fields
    if param.name.startswith('param') or changename == True:
        if 'level' in kwargs:  # parameter name for RBM
            pname += str(kwargs['level'])
        setval(param,
               name=generate_name(pname, withnumber=withnumber),
               **field)
    else:
        setval(param, **field)

    # Set/update parameter init fields
    setval(param.init, **initkv)
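A sketch of how the weight-specific kwargs flow through set_param_field, using the Parameter class shown later in this listing: w_lr and w_wd become lr_scale and wd_scale, and any remaining w_-prefixed key is copied (minus the prefix) into the init message. Values are illustrative, and 'std' is assumed to be a valid init field as in the Parameter docstring below.

p = Parameter(init='gaussian')
set_param_field(p.param, 'w', changename=True, w_lr=2.0, w_wd=0.5, w_std=0.01)
# p.param.lr_scale is now 2.0, p.param.wd_scale is 0.5,
# and 'std' = 0.01 has been written into p.param.init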
Example #12
    def __init__(self, lossname, topk=1, **kwargs):
        '''
        required
          lossname = (string) // softmaxloss, euclideanloss
        '''
        self.layer_type = enumLayerType(lossname)
        super(Loss, self).__init__(name=generate_name(lossname),
                                   type=self.layer_type, **kwargs)
        if lossname == 'softmaxloss':
            self.layer.softmaxloss_conf.topk = topk
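Usage sketch: topk only takes effect for the softmax loss, as the branch above shows.

softmax_loss = Loss('softmaxloss', topk=3)   # sets softmaxloss_conf.topk = 3
mse_loss = Loss('euclideanloss')             # topk is ignored for this type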
Example #13
    def __init__(self,
                 load,
                 phase='train',
                 checkpoint=None,
                 conf=None,
                 **kwargs):
        '''
        required
          load       = (string)  // type of data
        optional
          phase      = (string)  // phase of data layer
          checkpoint = (string)  // checkpoint path
          conf       = (Store)   // Store object
          **kwargs (KEY=VALUE)
            partition_dim = (int)  // partition dimension for net
        '''

        assert load != None, 'data type should be specified'
        if load == 'kData':
            super(Data, self).__init__(name=generate_name('data'),
                                       user_type=load,
                                       **kwargs)
        else:
            self.layer_type = enumLayerType(load)
            super(Data, self).__init__(name=generate_name('data'),
                                       type=self.layer_type,
                                       **kwargs)
        self.is_datalayer = True

        # include/exclude
        setval(self.layer, include=enumPhase(phase))
        #setval(self.layer, exclude=kTest if phase=='train' else kTrain)

        if conf == None:
            if load == 'kData':
                setval(self.layer.Extensions[data_conf], **kwargs)
            else:
                setval(self.layer.store_conf, **kwargs)
        else:
            setval(self.layer, store_conf=conf.proto)

        self.checkpoint = checkpoint  # checkpoint for training data
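A minimal construction sketch exercising only the branches visible above; store/data configuration fields are omitted because their names are not shown here.

train_data = Data(load='kData', phase='train')   # extra kwargs would go into data_conf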
Example #14
    def __init__(self, ratio=0.5):
        '''
        required
          ratio = (float) // ratio of drop out nodes
        '''

        self.name = 'dropout'
        self.layer_type = enumLayerType(self.name)
        super(Dropout, self).__init__(name=generate_name(self.name),
                                      type=self.layer_type)
        self.layer.dropout_conf.dropout_ratio = ratio
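Usage sketch:

drop = Dropout(ratio=0.3)   # drop 30% of nodes; the default ratio is 0.5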
Example #18
    def __init__(self, meanfile=None, **kwargs):
        '''
        required
          meanfile = (string) // path to meanfile (deprecated)
        '''

        assert meanfile != None, 'meanfile should be specified'
        self.name = 'rgb'
        self.layer_type = kRGBImage
        super(RGB, self).__init__(name=generate_name(self.name),
                                  type=self.layer_type)
        self.layer.rgbimage_conf.meanfile = meanfile
Example #20
    def __init__(self, size=0, **kwargs):
        '''
        required
          size = (int)  // local size
        '''

        super(LRN2D, self).__init__(name=generate_name('norm'), type=kLRN)
        # required
        assert size != 0, 'local size should be set'
        self.layer.lrn_conf.local_size = size
        init_values = get_init_values('lrn2d', **kwargs)
        setval(self.layer.lrn_conf, **init_values)
Example #22
    def __init__(self, **kwargs):
        '''
        **kwargs (KEY=VALUE)
          partition_dim = (int)  // partition dimension for net
        '''

        self.layer = Message('Layer', **kwargs).proto
        # required
        if not 'name' in kwargs:
            setval(self.layer, name=generate_name('layer', 1))

        # srclayers are set in Model.build()
        self.is_datalayer = False
    def __init__(self, activation='stanh', topk=1):
        '''
        required
          activation = (string)
        optional
          topk       = (int)  // the number of results
        '''

        self.name = activation
        if activation == 'tanh': activation = 'stanh' # <-- better way to set?
        self.layer_type = enumLayerType(activation)
        super(Activation, self).__init__(name=generate_name(self.name),
                                         type=self.layer_type)
        if activation == 'softmaxloss':
            self.layer.softmaxloss_conf.topk = topk
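Given the aliasing above ('tanh' is rewritten to 'stanh' before the layer type is resolved), a usage sketch:

act = Activation('tanh')                   # resolved internally as the 'stanh' type
out = Activation('softmaxloss', topk=5)    # topk only matters for 'softmaxloss'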
    def __init__(self, load, phase='train', checkpoint=None,
                 conf=None, **kwargs):
        '''
        required
          load       = (string)  // type of data
        optional
          phase      = (string)  // phase of data layer
          checkpoint = (string)  // checkpoint path
          conf       = (Store)   // Store object
          **kwargs (KEY=VALUE)
            partition_dim = (int)  // partition dimension for net
        '''

        assert load != None, 'data type should be specified'
        if load == 'kData':
            super(Data, self).__init__(name=generate_name('data'),
                                       user_type=load)
        else:
            self.layer_type = enumLayerType(load)
            super(Data, self).__init__(name=generate_name('data'),
                                       type=self.layer_type)
        self.is_datalayer = True

        # include/exclude
        setval(self.layer, include=enumPhase(phase))
        #setval(self.layer, exclude=kTest if phase=='train' else kTrain)

        if conf == None:
            if load == 'kData':
                setval(self.layer.Extensions[data_conf], **kwargs)
            else:
                setval(self.layer.store_conf, **kwargs)
        else:
            setval(self.layer, store_conf=conf.proto)

        self.checkpoint = checkpoint # checkpoint for training data
    def __init__(self, nb_filter=0, kernel=0, stride=1, pad=0,
                 init=None, w_param=None, b_param=None,
                 activation=None, **kwargs):
        '''
        required
          nb_filter = (int)  // the number of filters
          kernel    = (int)  // the size of filter
        optional
          stride    = (int)  // the size of stride
          pad       = (int)  // the size of padding
          init      = (string)     // 'uniform', 'gaussian', 'constant'
          w_param   = (Parameter)  // Parameter object for weight
          b_param   = (Parameter)  // Parameter object for bias
          **kwargs (KEY=VALUE)
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias 
            b_wd = (float) // weight decay multiplier for bias
        '''

        assert nb_filter > 0 and kernel > 0, 'should be set as positive int'
        super(Convolution2D, self).__init__(name=generate_name('conv', 1),
                                            type=kCConvolution)
        fields = {'num_filters' : nb_filter,
                  'kernel' : kernel,
                  'stride' : stride,
                  'pad' : pad}
        setval(self.layer.convolution_conf, **fields)

        # parameter w
        if w_param == None:
            self.init = 'gaussian' if init == None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param == None:
            self.init = 'constant' if init == None else init
            b_param = Parameter(init=self.init) # default: constant
        set_param_field(b_param.param, 'b', True, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer
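A usage sketch for this integer-only variant; the w_*/b_* kwargs are routed to set_param_field for the weight and bias parameters, and the values are illustrative.

conv = Convolution2D(nb_filter=32, kernel=5, stride=1, pad=2,
                     init='gaussian', activation='relu',
                     w_lr=1.0, w_wd=1.0, b_lr=2.0, b_wd=0.0)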
    def __init__(self, **kwargs):
        '''
        optional
          **kwargs
            name  = (string) // parameter name
            lr    = (float)  // learning rate multiplier
            wd    = (float)  // weight decay multiplier
            init  = (string) // init type {'constant','uniform','gaussian'}
            value = (int)    // value for 'constant'
            scale = (float)  // [low=-scale, high=scale] for 'uniform'
            low   = (float)  // low value   for 'uniform'
            high  = (float)  // high value  for 'uniform'
            mean  = (float)  // mean for 'gaussian'
            std   = (float)  // std  for 'gaussian'
        '''
        fields = {
            'lr_scale': kwargs['lr'] if 'lr' in kwargs else 1,
            'wd_scale': kwargs['wd'] if 'wd' in kwargs else 1
        }
        self.param = Message('Param', **fields).proto

        if not 'name' in kwargs:
            setval(self.param, name=generate_name('param', 1))
        else:
            pname = kwargs['name']
            # parameter name for RBM
            if 'level' in kwargs:
                pname += str(kwargs['level'])
                if pname[0] == 'b':
                    pname += '2'
            setval(self.param, name=pname)

        if 'share_from' in kwargs:
            setval(self.param, share_from=kwargs['share_from'])

        if 'init' in kwargs:
            init_values = get_init_values(kwargs['init'], **kwargs)
            if not kwargs['init'] == 'none':
                pgen = Message('ParamGen',
                               type=enumInitMethod(kwargs['init']),
                               **init_values)
                del kwargs['init']
                setval(self.param, init=pgen.proto)
        else:  # default: uniform
            pgen = Message('ParamGen', type=enumInitMethod('uniform'))
            setval(self.param, init=pgen.proto)
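Usage sketches covering the three init types listed in the docstring; every keyword used here is one documented above, assuming get_init_values accepts those keys.

w = Parameter(name='w1', init='gaussian', mean=0.0, std=0.02, lr=1.0, wd=1.0)
b = Parameter(name='b1', init='constant', value=0)
u = Parameter(init='uniform', scale=0.05)   # draws from [-0.05, 0.05]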
Example #30
    def __init__(self, **kwargs):
        '''
        **kwargs (KEY=VALUE)
          partition_dim = (int)  // partition dimension for net
        '''

        self.layer = Message('Layer', **kwargs).proto
        # required field
        if not 'name' in kwargs:
            setval(self.layer, name=generate_name('layer', 1))

        # layer connectivity is set in Model.build()
        self.is_datalayer = False
        self.singalayer = None
        self.srclayers = []

        # set src for Rafiki
        if 'src' in kwargs:
            self.src = kwargs['src']
        else:
            self.src = None
Example #32
    def __init__(self, activation='stanh', **kwargs):
        '''
        required
          activation = (string) // relu, sigmoid, tanh, stanh, softmax.
        '''
        if activation == 'tanh':
            print('Warning: Tanh layer is not supported for CPU')

        self.name = activation
        self.layer_type = kActivation
        if activation == 'stanh':
            self.layer_type = kSTanh
        elif activation == 'softmax':
            self.layer_type = kSoftmax
        super(Activation, self).__init__(name=generate_name(self.name),
                                         type=self.layer_type, **kwargs)
        if activation == 'relu':
            self.layer.activation_conf.type = RELU
        elif activation == 'sigmoid':
            self.layer.activation_conf.type = SIGMOID
        elif activation == 'tanh':
            self.layer.activation_conf.type = TANH # for GPU
Example #34
    def __init__(self, nb_filter=0, kernel=0, stride=1, pad=0,
                 init=None, w_param=None, b_param=None,
                 activation=None, **kwargs):
        '''
        required
          nb_filter = (int)        // the number of filters
          kernel    = (int/tuple)  // the size of filter
        optional
          stride    = (int/tuple)  // the size of stride
          pad       = (int/tuple)  // the size of padding
          init      = (string)     // 'uniform', 'gaussian', 'constant'
          w_param   = (Parameter)  // Parameter object for weight
          b_param   = (Parameter)  // Parameter object for bias
          **kwargs (KEY=VALUE)
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias
            b_wd = (float) // weight decay multiplier for bias
        '''

        assert nb_filter > 0, 'nb_filter should be set as positive int'
        super(Convolution2D, self).__init__(name=generate_name('conv', 1),
                                            type=kCConvolution, **kwargs)
        fields = {"num_filters":nb_filter}
        # for kernel
        if type(kernel) == int:
            fields['kernel'] = kernel
        else:
            fields['kernel_x'] = kernel[0]
            fields['kernel_y'] = kernel[1]
        # for stride
        if type(stride) == int:
            fields['stride'] = stride
        else:
            fields['stride_x'] = stride[0]
            fields['stride_y'] = stride[1]
        # for pad
        if type(pad) == int:
            fields['pad'] = pad
        else:
            fields['pad_x'] = pad[0]
            fields['pad_y'] = pad[1]

        setval(self.layer.convolution_conf, **fields)

        # parameter w
        if w_param == None:
            self.init = 'gaussian' if init == None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param == None:
            self.init = 'constant' if init == None else init
            b_param = Parameter(init=self.init) # default: constant
        set_param_field(b_param.param, 'b', True, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer
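This variant splits tuple arguments into separate *_x / *_y fields; a sketch with a non-square kernel (values illustrative):

conv = Convolution2D(nb_filter=64, kernel=(3, 5), stride=(1, 2), pad=(1, 2),
                     init='uniform', activation='sigmoid')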
    def __init__(self, out_dim=None, w_param=None, b_param=None,
                 sampling=None, **kwargs):
        '''
        Generate a set of layers (like MLP) according to the number of elements
          in out_dim, and on top of it, two layers RBMVis and RBMHid with
          bidirectional connection
        The layers are expanded and added in Energy.add()

        required
          out_dim  = (int) or (int list) // the number of hidden nodes
        optional
          w_param  = (Parameter)  // Parameter object for weight
          b_param  = (Parameter)  // Parameter object for bias
          sampling = (string)
        '''

        assert out_dim > 0, 'out_dim should be set'
        self.out_dim = [out_dim] if type(out_dim) == int else out_dim

        self.name = kwargs['name'] if 'name' in kwargs else 'RBMVis'
        self.layer_type = kwargs['type'] if 'type' in kwargs else kRBMVis
        super(RBM, self).__init__(name=generate_name(self.name,
                                  withnumber=False), type=self.layer_type)
        setval(self.layer.rbm_conf, hdim=self.out_dim[-1])
        if self.layer_type == kRBMHid and sampling != None:
            if sampling == 'gaussian':
                setval(self.layer.rbm_conf, gaussian=True)

        # parameter w
        if w_param == None:
            w_param = Parameter(init='gaussian', **kwargs)
            set_param_field(w_param.param, 'w', withnumber=False,
                            level=len(self.out_dim), **kwargs)
        else:
            if self.layer_type == kRBMHid:
                del kwargs['name']
            else:
                set_param_field(w_param.param, 'w', withnumber=False,
                                level=len(self.out_dim), **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param == None:
            b_param = Parameter(init='constant', **kwargs)
            set_param_field(b_param.param, 'b', withnumber=False,
                            level=len(self.out_dim), **kwargs)
        else:
            if self.layer_type == kRBMHid:
                pass
            else:
                set_param_field(b_param.param, 'b', withnumber=False,
                                level=len(self.out_dim), **kwargs)
        setval(self.layer, param=b_param.param)

        if self.layer_type == kRBMVis:
            wname = w_param.param.name
            parw = Parameter(name=wname+"_", init='none', share_from=wname)
            bname = b_param.param.name
            parb = Parameter(name=bname+"2", wd=0, init='constant')
            self.bidirect = RBM(self.out_dim, name='RBMHid', type=kRBMHid,
                                w_param=parw, b_param=parb, sampling=sampling).layer
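A stacking sketch following the docstring: out_dim may be a list, and a kRBMVis layer builds its paired kRBMHid layer, exposed as self.bidirect. Sizes are illustrative.

rbm = RBM(out_dim=[1000, 500, 250], sampling='gaussian')
# note: the assert above relies on Python 2 list-to-int comparison
hid_proto = rbm.bidirect   # the auto-generated RBMHid layer proto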
Example #37
    def __init__(self,
                 nb_filter=0,
                 kernel=0,
                 stride=1,
                 pad=0,
                 init=None,
                 w_param=None,
                 b_param=None,
                 activation=None,
                 **kwargs):
        '''
        required
          nb_filter = (int)        // the number of filters
          kernel    = (int/tuple)  // the size of filter
        optional
          stride    = (int/tuple)  // the size of stride
          pad       = (int/tuple)  // the size of padding
          init      = (string)     // 'uniform', 'gaussian', 'constant'
          w_param   = (Parameter)  // Parameter object for weight
          b_param   = (Parameter)  // Parameter object for bias
          **kwargs (KEY=VALUE)
            w_lr = (float) // learning rate multiplier for weight, used to
                           // scale the learning rate when updating parameters.
            w_wd = (float) // weight decay multiplier for weight, used to
                           // scale the weight decay when updating parameters.
            b_lr = (float) // learning rate multiplier for bias 
            b_wd = (float) // weight decay multiplier for bias
        '''

        assert nb_filter > 0, 'nb_filter should be set as positive int'
        super(Convolution2D, self).__init__(name=generate_name('conv', 1),
                                            type=kCConvolution)
        fields = {'num_filters': nb_filter}
        # for kernel
        if type(kernel) == int:
            fields['kernel'] = kernel
        else:
            fields['kernel_x'] = kernel[0]
            fields['kernel_y'] = kernel[1]
        # for stride
        if type(stride) == int:
            fields['stride'] = stride
        else:
            fields['stride_x'] = stride[0]
            fields['stride_y'] = stride[1]
        # for pad
        if type(pad) == int:
            fields['pad'] = pad
        else:
            fields['pad_x'] = pad[0]
            fields['pad_y'] = pad[1]

        setval(self.layer.convolution_conf, **fields)

        # parameter w
        if w_param == None:
            self.init = 'gaussian' if init == None else init
            w_param = Parameter(init=self.init)
        set_param_field(w_param.param, 'w', True, **kwargs)
        setval(self.layer, param=w_param.param)

        # parameter b
        if b_param == None:
            self.init = 'constant' if init == None else init
            b_param = Parameter(init=self.init)  # default: constant
        set_param_field(b_param.param, 'b', True, **kwargs)
        setval(self.layer, param=b_param.param)

        # following layers: e.g., activation, dropout, etc.
        if activation:
            self.mask = Activation(activation=activation).layer