# Imports needed by this excerpt. The TensorFlow/Keras imports are standard;
# the `weight_normalizers` path is an assumption about the Dynastes repo
# layout, and `_WscaleInitializer` / `_get_initializer` are Dynastes-internal
# helpers defined elsewhere in this module (not shown here).
import numpy as np
import tensorflow as tf
from tensorflow.keras import activations, constraints, initializers, regularizers
from tensorflow.keras import backend as K

from dynastes import weight_normalizers


# DynastesBaseLayer.__init__
def __init__(self, activity_regularizer=None, trainable=True, name=None,
             use_wscale=False, wlrmul=1., wgain=np.sqrt(2), wnorm=False,
             supports_caching=False, mask_threshold=0.5, **kwargs):
    self.weights_dict = {}
    # Route `<weight>_initializer` / `<weight>_regularizer` /
    # `<weight>_constraint` / `<weight>_normalizer` kwargs into per-weight
    # dictionaries (see _get_regularizers_from_keywords below).
    (self.initializers, self.regularizers, self.constraints,
     self.normalizers) = _get_regularizers_from_keywords(kwargs)
    self.use_wscale = use_wscale
    self.lrmul = wlrmul
    self.gain = wgain
    self.wnorm = wnorm
    self.supports_caching = supports_caching
    self.mask_threshold = mask_threshold
    super(DynastesBaseLayer, self).__init__(
        trainable=trainable,
        name=name,
        activity_regularizer=regularizers.get(activity_regularizer),
        **kwargs)
    self.supports_masking = True
# DynastesBaseLayer.add_weight
def add_weight(self, name=None, shape=None, trainable=None, partitioner=None,
               initializer=None, regularizer=None, constraint=None,
               dtype=None, use_wnorm=False, use_resource=None, **kwargs):
    # Explicit arguments override anything parsed from constructor kwargs.
    if initializer is not None:
        self.initializers[name] = initializers.get(initializer)
    if regularizer is not None:
        self.regularizers[name] = regularizers.get(regularizer)
    if constraint is not None:
        self.constraints[name] = constraints.get(constraint)
    _initializer = self.get_initializer(name)
    # Attach weight normalization to kernel-like weights when requested.
    if use_wnorm or (self.wnorm and (name in ['kernel', 'embedding']
                                     or name.endswith('kernel'))):
        if name in self.normalizers and self.normalizers[name] is not None:
            self.normalizers[name] = weight_normalizers.WeightNormalizer(
                _initializer, next_layer=self.normalizers[name])
        else:
            self.normalizers[name] = weight_normalizers.WeightNormalizer(
                _initializer)
    # Equalized learning rate ("wscale"): wrap the initializer and chain a
    # runtime normalizer that rescales the weight on every forward pass.
    if self.use_wscale:
        _initializer = _WscaleInitializer(_initializer, lrmul=self.lrmul)
        self.initializers[name] = _initializer
        if name in self.normalizers and self.normalizers[name] is not None:
            self.normalizers[name] = weight_normalizers.WscaleNormalizer(
                next_layer=self.normalizers[name],
                lrmul=self.lrmul,
                gain=self.gain)
        else:
            self.normalizers[name] = weight_normalizers.WscaleNormalizer(
                lrmul=self.lrmul, gain=self.gain)
    if dtype is None:
        dtype = self.dtype or K.floatx()
    weight = super(DynastesBaseLayer, self).add_weight(
        name=name,
        shape=shape,
        initializer=_initializer,
        regularizer=self.get_regularizer(name),
        trainable=trainable,
        constraint=self.get_constraint(name),
        partitioner=partitioner,
        use_resource=use_resource,
        dtype=dtype,
        **kwargs)
    if name in self.normalizers and self.normalizers[name] is not None:
        self.normalizers[name].build(shape)
    self.weights_dict[name] = weight
    return weight
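# Hedged usage sketch (illustration only, not part of the library): a minimal
# subclass registering a weight through the overridden add_weight above and
# applying whatever normalizer got attached for it (wscale and/or weight
# normalization). `_ExampleDense`, its call() body, and the assumption that
# normalizers are callable on the raw weight are all mine, not the library's.
class _ExampleDense(DynastesBaseLayer):

    def __init__(self, units, **kwargs):
        super(_ExampleDense, self).__init__(**kwargs)
        self.units = units

    def build(self, input_shape):
        # An explicit initializer here overrides any `kernel_initializer`
        # passed to the constructor.
        self.add_weight(name='kernel',
                        shape=(int(input_shape[-1]), self.units),
                        initializer='he_normal')
        super(_ExampleDense, self).build(input_shape)

    def call(self, inputs):
        kernel = self.weights_dict['kernel']
        normalizer = self.normalizers.get('kernel')
        if normalizer is not None:
            kernel = normalizer(kernel)
        return tf.matmul(inputs, kernel)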
# ActivatedKernelBiasBaseLayer.__init__
def __init__(self, activation=None, use_bias=True, activity_regularizer=None,
             **kwargs):
    super(ActivatedKernelBiasBaseLayer, self).__init__(
        activity_regularizer=regularizers.get(activity_regularizer),
        **kwargs)
    self.activation = activations.get(activation)
    self.use_bias = use_bias
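# Hedged construction example (`SomeActivatedLayer` is a hypothetical
# concrete subclass, not a class from this library): class-specific
# arguments combine with the per-weight kwargs routed by DynastesBaseLayer.
#
#     layer = SomeActivatedLayer(activation='relu',
#                                use_bias=True,
#                                kernel_initializer='he_normal',
#                                bias_regularizer='l2',
#                                use_wscale=True, wlrmul=0.01)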
def _get_regularizers_from_keywords(kwargs):
    """Pop `*_initializer`, `*_regularizer`, `*_constraint` and
    `*_normalizer` keyword arguments, resolve each through its getter, and
    key it by the weight-name prefix (e.g. `kernel_initializer` -> `kernel`).
    `activity_regularizer` is left alone; the Keras base Layer handles it."""
    _initializers = {}
    _regularizers = {}
    _constraints = {}
    _normalizers = {}
    # Snapshot the keys up-front, since matching entries are popped below.
    kwarg_keys = list(kwargs.keys())
    for kwarg in kwarg_keys:
        if kwarg.endswith('initializer'):
            _initializers[kwarg.split('_initializer')[0]] = _get_initializer(
                kwargs.pop(kwarg, None))
        elif kwarg.endswith('regularizer'):
            if kwarg != 'activity_regularizer':
                _regularizers[kwarg.split('_regularizer')[0]] = regularizers.get(
                    kwargs.pop(kwarg, None))
        elif kwarg.endswith('constraint'):
            _constraints[kwarg.split('_constraint')[0]] = constraints.get(
                kwargs.pop(kwarg, None))
        elif kwarg.endswith('normalizer'):
            _normalizers[kwarg.split('_normalizer')[0]] = weight_normalizers.get(
                kwargs.pop(kwarg, None))
    return _initializers, _regularizers, _constraints, _normalizers
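def _demo_keyword_routing():
    # Hedged, illustration-only demo (this helper is not part of the
    # library): shows how the parser above consumes matching kwargs and
    # leaves everything else for the base Layer constructor.
    kwargs = {'kernel_initializer': 'he_normal',
              'bias_regularizer': 'l2',
              'units': 64}
    inits, regs, cons, norms = _get_regularizers_from_keywords(kwargs)
    assert set(inits) == {'kernel'}  # keyed by the `kernel` prefix
    assert set(regs) == {'bias'}
    assert kwargs == {'units': 64}   # consumed entries were popped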
# DynastesBaseLayer.get_regularizer
def get_regularizer(self, name):
    if name not in self.regularizers:
        self.regularizers[name] = regularizers.get(None)
    return self.regularizers[name]