Example 1
    def __init__(self,
                 hidden_size: int,
                 hidden_layer_size: int = 100,
                 redistribution_type: str = "pendulum",
                 normaliser: str = "softmax",
                 batch_first: bool = True,
                 initial_output_bias: float = None,
                 scale_c: bool = True,
                 friction: bool = False,
                 aux_input_size: int = 9):
        """
                    
        Parameters
        ----------
        mass_input_size : int
            Number of mass input features at each time step.
        aux_input_size : int
            Number of auxiliary input features at each time step.
        hidden_size : int
            Number of output features at each time step.
        redistribution_type : str, optional
            Specifies how the redistribution matrix should be computed.
        batch_first : bool, optional
            Whether or not the first dimension is the batch dimension.
        """
        super(NoInputMassConserving, self).__init__()
        self.hidden_size = hidden_size
        self.hidden_layer_size = hidden_layer_size
        self.redistribution_type = redistribution_type
        self.initial_output_bias = initial_output_bias
        self.scale_c = scale_c
        self.batch_first = batch_first

        self.friction = friction

        if normaliser == 'sigmoid':
            self.normaliser = NormalisedSigmoid(dim=-1)
        else:
            self.normaliser = nn.Softmax(dim=-1)
        self.out_gate = Gate(self.hidden_size, self.hidden_size)
        # NOTE: without normalised sigmoid here, there seem to be troubles!
        self.redistribution = get_redistribution(
            self.redistribution_type,
            num_states=self.hidden_size,
            num_features=100,  # same size as the embedder output defined below
            hidden_layer_size=self.hidden_layer_size,
            normaliser=self.normaliser)
        self.out_gate.reset_parameters()
        self.redistribution.reset_parameters()
        self.reset_parameters()

        self.embedder = nn.Sequential(
            nn.Linear(self.hidden_size + aux_input_size, 50), nn.ReLU(),
            nn.Linear(50, 100), nn.ReLU())

        self.fc_state = nn.Linear(64, hidden_size)
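For reference, a minimal instantiation sketch of the constructor above. The module path `mclstm` is a placeholder (the snippet does not show where the class lives), and the argument values are purely illustrative; only keyword arguments that appear in the signature are used.

# Placeholder import path; substitute the module that actually defines the class.
from mclstm import NoInputMassConserving

model = NoInputMassConserving(
    hidden_size=16,           # number of mass-conserving cell states
    hidden_layer_size=100,    # hidden layer of the redistribution network
    redistribution_type="pendulum",
    normaliser="sigmoid",     # any other value falls back to nn.Softmax
    friction=True,
    aux_input_size=9,         # auxiliary input features per time step
)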
Example 2
    def __init__(self,
                 mass_input_size: int,
                 aux_input_size: int,
                 hidden_size: int,
                 redistribution_type: str = "gate",
                 normaliser: str = "softmax",
                 batch_first: bool = True):
        super().__init__(mass_input_size, aux_input_size, hidden_size, redistribution_type, normaliser, batch_first)

        input_size = self.aux_input_size + hidden_size
        self.out_gate = Gate(self.hidden_size, input_size)
        self.junction = get_redistribution("gate",
                                           num_states=self.mass_input_size,
                                           num_features=input_size,
                                           num_out=self.hidden_size,
                                           normaliser=nn.Softmax(dim=-1))
        self.redistribution = get_redistribution(self.redistribution_type,
                                                 num_states=self.hidden_size,
                                                 num_features=input_size,
                                                 normaliser=self.normaliser)

        self.reset_parameters()
Example 3
    def __init__(self,
                 mass_input_size: int,
                 aux_input_size: int,
                 hidden_size: int,
                 redistribution_type: str = "gate",
                 normaliser: str = "sigmoid",
                 batch_first: bool = True):
        """
        Parameters
        ----------
        mass_input_size : int
            Number of mass input features at each time step.
        aux_input_size : int
            Number of auxiliary input features at each time step.
        hidden_size : int
            Number of output features at each time step.
        redistribution_type : str, optional
            Specifies how the redistribution matrix should be computed.
        normaliser : str, optional
            Normalisation applied to the junction and redistribution outputs.
        batch_first : bool, optional
            Whether or not the first dimension is the batch dimension.
        """
        super(MCLSTM, self).__init__(mass_input_size, aux_input_size, hidden_size, redistribution_type, normaliser,
                                     batch_first)

        in_shape = self.mass_input_size + self.aux_input_size + self.hidden_size
        self.out_gate = Gate(self.hidden_size, in_shape)
        # NOTE: without normalised sigmoid here, there seem to be troubles!
        self.junction = get_redistribution("gate",
                                           num_states=self.mass_input_size,
                                           num_features=in_shape,
                                           num_out=self.hidden_size,
                                           normaliser=self.normaliser)
        self.redistribution = get_redistribution(self.redistribution_type,
                                                 num_states=self.hidden_size,
                                                 num_features=in_shape,
                                                 normaliser=self.normaliser)

        self.reset_parameters()
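A corresponding usage sketch for the MCLSTM constructor above, again with a placeholder import path and illustrative argument values. Assuming the base class derives from torch.nn.Module (it registers nn submodules such as the gates), the usual Module utilities apply.

# Placeholder import path for illustration only.
from mclstm import MCLSTM

mclstm = MCLSTM(
    mass_input_size=1,    # one conserved input quantity
    aux_input_size=5,     # five auxiliary (non-conserved) inputs
    hidden_size=32,       # number of mass-conserving cell states
    redistribution_type="gate",
    normaliser="sigmoid",
    batch_first=True,
)

# Generic nn.Module helpers work as usual, e.g. counting parameters.
num_params = sum(p.numel() for p in mclstm.parameters())
print(f"number of parameters: {num_params}")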
Example 4
    def __init__(self,
                 mass_input_size: int,
                 aux_input_size: int,
                 hidden_size: int,
                 redistribution_type: str = "gate",
                 normaliser: str = "softmax",
                 batch_first: bool = True):
        super(MCLSTMv2, self).__init__(mass_input_size, aux_input_size, hidden_size, redistribution_type, normaliser,
                                       batch_first)

        self.out_gate = Gate(self.hidden_size, self.aux_input_size)
        # NOTE: without normalised sigmoid here, there seem to be troubles!
        self.junction = get_redistribution("gate",
                                           num_states=self.mass_input_size,
                                           num_features=self.aux_input_size,
                                           num_out=self.hidden_size,
                                           normaliser=nn.Softmax(dim=-1))
        self.redistribution = get_redistribution(self.redistribution_type,
                                                 num_states=self.hidden_size,
                                                 num_features=self.aux_input_size,
                                                 normaliser=self.normaliser)

        self.reset_parameters()
Example 5
    def __init__(self,
                 mass_input_size: int,
                 aux_input_size: int,
                 hidden_size: int,
                 redistribution_type: str = "gate",
                 normaliser: str = "nonorm",
                 batch_first: bool = True):
        super().__init__(mass_input_size, aux_input_size, hidden_size,
                         redistribution_type, normaliser, batch_first)
        self.junction = get_redistribution("gate",
                                           num_states=self.mass_input_size,
                                           num_features=self.aux_input_size,
                                           num_out=self.hidden_size,
                                           normaliser=nn.Sigmoid())
Example 6
    def __init__(self,
                 mass_input_size: int,
                 aux_input_size: int,
                 hidden_size: int,
                 output_size: int,
                 redistribution_type: str = "gate",
                 normaliser: str = "softmax",
                 batch_first: bool = True):
        super().__init__(mass_input_size, aux_input_size, hidden_size, redistribution_type, normaliser, batch_first)

        self.output_size = output_size
        self.final = get_redistribution("linear",
                                        num_states=self.hidden_size,
                                        num_features=0,
                                        num_out=self.output_size,
                                        normaliser=nn.Softmax(dim=-1))
        self.final.reset_parameters()
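This variant adds an output_size and a final "linear" redistribution that maps the hidden_size cell states to output_size outputs. A hedged construction sketch follows; both the class name MCLSTMWithOutput and the import path are hypothetical, since the snippet does not show them.

# Class name and module path are placeholders, not taken from the snippet.
from mclstm import MCLSTMWithOutput

model = MCLSTMWithOutput(
    mass_input_size=1,
    aux_input_size=5,
    hidden_size=32,
    output_size=4,        # self.final maps the 32 cell states to 4 outputs
    redistribution_type="gate",
    normaliser="softmax",
)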
Example 7
    def __init__(self,
                 mass_input_size,
                 aux_input_size,
                 hidden_size,
                 redistribution_type: str = "linear",
                 normaliser: str = "id",
                 batch_first: bool = True):
        super(RLSTM, self).__init__(mass_input_size, aux_input_size, hidden_size,
                                    redistribution_type, normaliser, batch_first)
        input_size = mass_input_size + aux_input_size + hidden_size
        self.gates = nn.Linear(input_size, 2 * hidden_size)
        self.connections = nn.Linear(input_size, hidden_size)
        self.redistribution = get_redistribution(self.redistribution_type,
                                                 num_states=self.hidden_size,
                                                 num_features=input_size,
                                                 normaliser=self.normaliser)