@classmethod
def from_params(cls, params: List[Tuple[str, Params]]) -> Optional['RegularizerApplicator']:
    """
    Converts a list of pairs (regex, params) into a RegularizerApplicator.
    This list should look like
    ``[["regex1", {"type": "l2", "alpha": 0.01}], ["regex2", "l1"]]``
    where each parameter receives the penalty corresponding to the first regex
    that matches its name (which may be no regex and hence no penalty).
    The values can either be strings, in which case they correspond to the names
    of regularizers, or dictionaries, in which case they must contain the "type" key,
    corresponding to the name of a regularizer. In addition, they may contain auxiliary
    named parameters which will be fed to the regularizer itself. To determine valid
    auxiliary parameters, please refer to the documentation of the regularizer in question.

    Parameters
    ----------
    params : ``List[Tuple[str, Params]]``, required.
        A list of (parameter name regex, regularizer params) pairs.

    Returns
    -------
    A RegularizerApplicator containing the specified Regularizers,
    or ``None`` if no Regularizers are specified.
    """
    if not params:
        return None

    instantiated_regularizers = []
    for parameter_regex, regularizer_params in params:
        if isinstance(regularizer_params, str):
            # A bare string is the registered name of a regularizer with default arguments.
            regularizer = Regularizer.by_name(regularizer_params)()
        else:
            # Otherwise "type" names the regularizer and the remaining keys are its kwargs.
            regularizer_type = Regularizer.by_name(regularizer_params.pop("type"))
            regularizer = regularizer_type(**regularizer_params)  # type: ignore
        instantiated_regularizers.append((parameter_regex, regularizer))
    return RegularizerApplicator(instantiated_regularizers)
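# Illustrative usage sketch (not part of the original module): assuming AllenNLP's
# ``Params`` class and the package layout ``allennlp.nn.regularizers``, ``from_params``
# can be driven with (regex, params) pairs like this.
from allennlp.common import Params
from allennlp.nn.regularizers import RegularizerApplicator

applicator = RegularizerApplicator.from_params([
    ("weight", Params({"type": "l2", "alpha": 0.01})),  # L2 penalty on parameters matching "weight"
    ("bias", "l1"),                                      # bare string: L1 penalty with default alpha
])

# An empty list of regularizers yields None, i.e. no penalty is applied.
assert RegularizerApplicator.from_params([]) is None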
def test_registry_has_builtin_regularizers(self):
    assert Regularizer.by_name("l1").__name__ == "L1Regularizer"
    assert Regularizer.by_name("l2").__name__ == "L2Regularizer"
import torch

from allennlp.nn.regularizers.regularizer import Regularizer


@Regularizer.register("l1")
class L1Regularizer(Regularizer):
    """Represents a penalty proportional to the sum of the absolute values of the parameters."""

    def __init__(self, alpha: float = 0.01) -> None:
        self.alpha = alpha

    def __call__(self, parameter: torch.Tensor) -> torch.Tensor:
        return self.alpha * torch.sum(torch.abs(parameter))


@Regularizer.register("l2")
class L2Regularizer(Regularizer):
    """Represents a penalty proportional to the sum of squared values of the parameters."""

    def __init__(self, alpha: float = 0.01) -> None:
        self.alpha = alpha

    def __call__(self, parameter: torch.Tensor) -> torch.Tensor:
        return self.alpha * torch.sum(torch.pow(parameter, 2))
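# Minimal sketch of using the regularizers directly (illustrative only; the import path
# assumes the classes above live in allennlp/nn/regularizers/regularizers.py).
import torch

from allennlp.nn.regularizers.regularizers import L1Regularizer, L2Regularizer

weight = torch.nn.Parameter(torch.tensor([[1.0, -2.0], [3.0, -4.0]]))

l1_penalty = L1Regularizer(alpha=0.1)(weight)  # 0.1 * (1 + 2 + 3 + 4) = 1.0
l2_penalty = L2Regularizer(alpha=0.1)(weight)  # 0.1 * (1 + 4 + 9 + 16) = 3.0

print(l1_penalty.item(), l2_penalty.item())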