Example #1
def __init__(self,
             in_features: int,
             event_size: int,
             input_dependent_scale: bool = True):
    super().__init__()
    # Linear head producing the location parameter
    self.loc_module = nn.Linear(in_features, event_size)
    if input_dependent_scale:
        # Log-scale predicted from the input
        self.log_scale_module = nn.Linear(in_features, event_size)
    else:
        # Input-independent log-scale (standalone learnable parameter)
        self.log_scale_module = LeafParameter(event_size)
    # Orthogonal weight initialization with a small gain
    self.apply(initialize_("orthogonal", gain=0.01))
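The pattern above (a linear location head plus either an input-dependent or a constant log-scale head) is typical for modules that parameterize a Normal distribution. Below is a minimal self-contained sketch in plain PyTorch; LeafParameter, the orthogonal initializer, and the class name NormalParams are re-created here from their apparent intent and are assumptions, not the library's actual code.

import torch
import torch.nn as nn


class LeafParameter(nn.Module):
    """Assumed behavior: a learnable constant vector, broadcast over the batch."""

    def __init__(self, event_size: int):
        super().__init__()
        self.value = nn.Parameter(torch.zeros(event_size))

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        # Broadcast the constant vector to the batch shape of `inputs`
        return self.value.expand(*inputs.shape[:-1], -1)


def orthogonal_init(gain: float):
    """Rough stand-in for initialize_("orthogonal", gain=...)."""

    def init(module: nn.Module):
        if isinstance(module, nn.Linear):
            nn.init.orthogonal_(module.weight, gain=gain)
            nn.init.constant_(module.bias, 0.0)

    return init


class NormalParams(nn.Module):  # hypothetical class name
    def __init__(self, in_features: int, event_size: int,
                 input_dependent_scale: bool = True):
        super().__init__()
        self.loc_module = nn.Linear(in_features, event_size)
        if input_dependent_scale:
            self.log_scale_module = nn.Linear(in_features, event_size)
        else:
            self.log_scale_module = LeafParameter(event_size)
        self.apply(orthogonal_init(gain=0.01))

    def forward(self, inputs: torch.Tensor):
        return self.loc_module(inputs), self.log_scale_module(inputs)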
Example #2
def __init__(self, in_size, out_size, layer_norm=True, hidden_size=3):
    super().__init__()
    # Two hidden layers of `hidden_size` units with ReLU activations
    fully_connected = FullyConnected(
        in_size,
        units=(hidden_size,) * 2,
        activation="ReLU",
        layer_norm=layer_norm,
    )
    # Output layer initialized with small uniform weights
    linear = nn.Linear(fully_connected.out_features, out_size)
    linear.apply(initialize_("uniform", a=-3e-3, b=3e-3))
    self.net = nn.Sequential(fully_connected, linear)
Example #3
def __init__(self,
             in_features: int,
             units: Tuple[int, ...] = (),
             activation: str = None,
             layer_norm: bool = False,
             **initializer_options):
    super().__init__()
    self.in_features = in_features
    activ = get_activation(activation)
    # Prepend the input size so consecutive pairs give each layer's dimensions
    units = (self.in_features,) + tuple(units)
    modules = []
    for in_dim, out_dim in zip(units[:-1], units[1:]):
        modules.append(nn.Linear(in_dim, out_dim))
        if layer_norm:
            modules.append(nn.LayerNorm(out_dim))
        if activ:
            modules.append(activ())
    self.out_features = units[-1]
    self.sequential = nn.Sequential(*modules)

    # Only apply a weight initializer if one was named explicitly
    if "name" in initializer_options:
        self.apply(
            initialize_(activation=activation, **initializer_options))
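For concreteness, a call such as FullyConnected(4, units=(32, 32), activation="ReLU", layer_norm=True), with argument values chosen purely for illustration and assuming get_activation("ReLU") resolves to nn.ReLU, would assemble a stack equivalent to this plain nn.Sequential:

import torch.nn as nn

# Hand-rolled equivalent of the loop above for in_features=4, units=(32, 32),
# layer_norm=True, activation="ReLU" (illustrative values, not from the source)
sequential = nn.Sequential(
    nn.Linear(4, 32),
    nn.LayerNorm(32),
    nn.ReLU(),
    nn.Linear(32, 32),
    nn.LayerNorm(32),
    nn.ReLU(),
)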
Example #4
def __init__(self, in_features: int, n_categories: int):
    super().__init__()
    # Linear head producing unnormalized logits over the categories
    self.logits_module = nn.Linear(in_features, n_categories)
    self.apply(initialize_("orthogonal", gain=0.01))
Example #5
def __init__(self, *args, beta, **kwargs):
    super().__init__()
    # Wrap a standard linear layer; `beta` is stored for later use
    self.linear = nn.Linear(*args, **kwargs)
    self.beta = beta
    self.apply(initialize_("xavier_uniform", activation="tanh"))
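The initializer call in this last example presumably applies Xavier-uniform initialization scaled by the tanh gain. A rough plain-PyTorch equivalent, written as an assumption rather than the library's actual implementation:

import torch.nn as nn


def xavier_tanh_init(module: nn.Module):
    # Assumed equivalent of initialize_("xavier_uniform", activation="tanh"):
    # Xavier-uniform weights scaled by the tanh gain, biases zeroed
    if isinstance(module, nn.Linear):
        nn.init.xavier_uniform_(
            module.weight, gain=nn.init.calculate_gain("tanh"))
        nn.init.constant_(module.bias, 0.0)


linear = nn.Linear(16, 8)
linear.apply(xavier_tanh_init)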