Example #1
    def __init__(self, size=0, **kwargs):
        '''
        required
          size = (int)  // local size
        '''

        super(LRN2D, self).__init__(name=generate_name('norm'), type=kLRN)
        # 'size' is required: it sets the extent of the local normalization window
        assert size != 0, 'local size should be set'
        self.layer.lrn_conf.local_size = size
        # forward the remaining keyword arguments into the LRN protobuf config
        init_values = get_init_values('lrn2d', **kwargs)
        setval(self.layer.lrn_conf, **init_values)
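A minimal usage sketch for the constructor above. The import path is an assumption: the snippet only shows the `__init__` body of the LRN2D class, so treat the module location as hypothetical.

# Hypothetical usage; 'singa.model' is an assumed import path.
from singa.model import LRN2D

# 'size' is the only required argument; extra keyword arguments
# are forwarded into the layer's LRN configuration.
norm = LRN2D(size=5)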
Example #2
    def __init__(self, **kwargs):
        '''
        optional
          **kwargs
            name  = (string) // parameter name
            lr    = (float)  // learning rate multiplier
            wd    = (float)  // weight decay multiplier
            init  = (string) // init type {'constant','uniform','gaussian'}
            value = (int)    // value for 'constant'
            scale = (float)  // [low=-scale, high=scale] for 'uniform'
            low   = (float)  // low value   for 'uniform'
            high  = (float)  // high value  for 'uniform'
            mean  = (float)  // mean for 'gaussian'
            std   = (float)  // std  for 'gaussian'
        '''
        # learning-rate and weight-decay multipliers default to 1
        fields = {
            'lr_scale': kwargs.get('lr', 1),
            'wd_scale': kwargs.get('wd', 1)
        }
        self.param = Message('Param', **fields).proto

        if 'name' not in kwargs:
            setval(self.param, name=generate_name('param', 1))
        else:
            pname = kwargs['name']
            # for RBM layers, append the level to the parameter name and
            # disambiguate bias parameters (names starting with 'b')
            if 'level' in kwargs:
                pname += str(kwargs['level'])
                if pname[0] == 'b':
                    pname += '2'
            setval(self.param, name=pname)

        # optionally share this parameter with another one
        if 'share_from' in kwargs:
            setval(self.param, share_from=kwargs['share_from'])

        if 'init' in kwargs:
            init_values = get_init_values(kwargs['init'], **kwargs)
            if kwargs['init'] != 'none':
                pgen = Message('ParamGen',
                               type=enumInitMethod(kwargs['init']),
                               **init_values)
                del kwargs['init']
                setval(self.param, init=pgen.proto)
        else:  # default: uniform initialization
            pgen = Message('ParamGen', type=enumInitMethod('uniform'))
            setval(self.param, init=pgen.proto)
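A minimal usage sketch for this constructor. The class name `Parameter` and the import path are assumptions, since only the `__init__` body is shown above.

# Hypothetical usage; class name and module path are assumed.
from singa.model import Parameter

# Gaussian-initialized parameter with a doubled learning-rate multiplier
w = Parameter(name='w', init='gaussian', mean=0.0, std=0.01, lr=2)

# With no 'init' keyword, initialization defaults to 'uniform'
b = Parameter(name='b')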