def _get_bias_init(config):
    """Make bias initializer. Default to Constant (0.1)"""
    config = config or {
        'typename': 'ConstantInitializer',
        'args': {'value': 0.1},
    }
    return getter.get_initializer(
        config['typename'])(**config.get('args', {}))
def _get_bias_init(config): """Make bias initializer. Default to Constant (0.1)""" config = config or { 'typename': 'ConstantInitializer', 'args': { 'value': 0.1 } } return get_initializer(config['typename'])(**config.get('args', {}))
def _instantiate_parameters(self, input_shape, dtype):
    """Instantiate batch normalization parameters.

    mean/var are non-trainable statistics (initialized to 0 and 1);
    scale/offset are trainable affine parameters. The channel axis is
    assumed to be axis 1, and a dimshuffle-style broadcast pattern is
    prepared for the normalization step.
    """
    dim = len(input_shape)
    shape = tuple(input_shape[i] for i in range(dim) if i == 1)
    self._axes = tuple(i for i in range(dim) if not i == 1)
    self._pattern = tuple((0 if i == 1 else 'x') for i in range(dim))

    _LG.debug('    Shape: %s', shape)
    _LG.debug('     Axes: %s', self._axes)
    _LG.debug('  Pattern: %s', self._pattern)

    const_init = get_initializer('ConstantInitializer')
    if self._parameter_variables['mean'] is None:
        mean = wrapper.get_variable(
            name='mean', shape=shape, trainable=False,
            initializer=const_init(0), dtype=dtype)
        self.set_parameter_variables(mean=mean)

    if self._parameter_variables['var'] is None:
        var = wrapper.get_variable(
            name='var', shape=shape, trainable=False,
            initializer=const_init(1), dtype=dtype)
        self.set_parameter_variables(var=var)

    if self._parameter_variables['scale'] is None:
        scale_val = self.args['scale']
        scale = wrapper.get_variable(
            name='scale', shape=shape, trainable=True,
            initializer=const_init(scale_val), dtype=dtype)
        self.set_parameter_variables(scale=scale)

    if self._parameter_variables['offset'] is None:
        offset_val = self.args['offset']
        offset = wrapper.get_variable(
            name='offset', shape=shape, trainable=True,
            initializer=const_init(offset_val), dtype=dtype)
        self.set_parameter_variables(offset=offset)
def _instantiate_parameters(self, input_shape):
    """Instantiate batch normalization parameters.

    The channel axis is taken from the convolution data format:
    NCHW (or 2-D input) -> axis 1, NHWC -> axis 3.
    """
    dim, fmt = len(input_shape), luchador.get_nn_conv_format()
    channel = 1 if dim == 2 or fmt == 'NCHW' else 3
    self._axes = tuple(i for i in range(dim) if not i == channel)
    shape = tuple(input_shape[i] for i in range(dim) if i == channel)

    const_init = get_initializer('ConstantInitializer')
    if self.get_parameter_variable('mean') is None:
        mean = wrapper.get_variable(
            name='mean', shape=shape,
            initializer=const_init(0), trainable=False)
        self.set_parameter_variables(mean=mean)

    if self.get_parameter_variable('var') is None:
        var = wrapper.get_variable(
            name='var', shape=shape,
            initializer=const_init(1), trainable=False)
        self.set_parameter_variables(var=var)

    if self.get_parameter_variable('scale') is None:
        scale = wrapper.get_variable(
            name='scale', shape=shape, trainable=True,
            initializer=const_init(self.args['scale']))
        self.set_parameter_variables(scale=scale)

    if self.get_parameter_variable('offset') is None:
        offset = wrapper.get_variable(
            name='offset', shape=shape, trainable=True,
            initializer=const_init(self.args['offset']))
        self.set_parameter_variables(offset=offset)
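The two _instantiate_parameters implementations differ mainly in how they locate the channel axis: the first assumes the channel sits at axis 1 (the NCHW convention) and additionally builds a dimshuffle-style broadcast pattern, while the second consults luchador.get_nn_conv_format() and uses axis 3 for NHWC input. The standalone sketch below re-derives just that shape/axes/pattern bookkeeping for hypothetical input shapes; it is an illustration, not the library code.

def _bn_layout(input_shape, fmt='NCHW'):
    """Re-derive the shape/axes/pattern logic of the two snippets above.

    Illustration only; `fmt` mimics luchador.get_nn_conv_format().
    """
    dim = len(input_shape)
    channel = 1 if dim == 2 or fmt == 'NCHW' else 3
    # Parameter shape: one value per channel.
    shape = tuple(input_shape[i] for i in range(dim) if i == channel)
    # Axes to reduce over when computing batch statistics.
    axes = tuple(i for i in range(dim) if not i == channel)
    # Theano-style broadcast pattern ('x' marks broadcast dimensions).
    pattern = tuple((0 if i == channel else 'x') for i in range(dim))
    return shape, axes, pattern


print(_bn_layout((32, 4, 84, 84), fmt='NCHW'))  # ((4,), (0, 2, 3), ('x', 0, 'x', 'x'))
print(_bn_layout((32, 84, 84, 4), fmt='NHWC'))  # ((4,), (0, 1, 2), ('x', 'x', 'x', 0))
print(_bn_layout((32, 512)))                    # ((512,), (0,), ('x', 0))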
def _get_weight_init(config):
    """Make weight initializer. Default to Xavier"""
    config = config or {'typename': 'XavierInitializer'}
    return getter.get_initializer(
        config['typename'])(**config.get('args', {}))
def _get_filter_init(config):
    """Make filter initializer. Default to Xavier"""
    config = config or {'typename': 'XavierInitializer'}
    return get_initializer(
        config['typename'])(**config.get('args', {}))
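A small behavioural note that applies to all of the init helpers above: the `config or {...}` idiom treats None and an empty dict identically, so an explicitly empty config also falls back to the default initializer, and config.get('args', {}) tolerates a config that names a typename but omits args. Below is a standalone illustration of that fallback, with a hypothetical XavierInitializer standing in for the real class.

# Hypothetical stand-in for XavierInitializer; only the `config or {...}`
# fallback and the optional 'args' key are being demonstrated.
class XavierInitializer(object):
    def __init__(self, **kwargs):
        self.kwargs = kwargs


def get_initializer(typename):
    return {'XavierInitializer': XavierInitializer}[typename]


def resolve(config):
    # Same resolution pattern as _get_filter_init / _get_weight_init above.
    config = config or {'typename': 'XavierInitializer'}
    return get_initializer(config['typename'])(**config.get('args', {}))


print(type(resolve(None)).__name__)  # XavierInitializer: None falls back to default
print(type(resolve({})).__name__)    # XavierInitializer: an empty dict is falsy, too
print(resolve({'typename': 'XavierInitializer'}).kwargs)  # {}: 'args' key is optional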