def __init__(self,
                 input_size_dyn: int,
                 input_size_stat: int,
                 hidden_size: int,
                 embedding_hiddens: List = None,
                 initial_forget_bias: int = 5,
                 dropout: float = 0.0,
                 concat_static: bool = True):
        """Initialize model.

        Builds one of three recurrent cores depending on the configuration:
        an EmbLSTM (static features passed through an embedding network) when
        `concat_static` is True and `embedding_hiddens` is non-empty, a plain
        LSTM when `concat_static` is True and no embedding layers are given,
        and an EA-LSTM otherwise. A dropout layer and a single-output linear
        head are stacked on top in every case.

        Parameters
        ----------
        input_size_dyn: int
            Number of dynamic input features.
        input_size_stat: int
            Number of static input features (used in the EA-LSTM input gate).
        hidden_size: int
            Number of LSTM cells/hidden units.
        embedding_hiddens: List, optional
            Hidden layer sizes of the static-feature embedding network. Only
            consulted when `concat_static` is True; a non-empty list selects
            the EmbLSTM core. (default: empty list)
        initial_forget_bias: int
            Value of the initial forget gate bias. (default: 5)
        dropout: float
            Dropout probability in range(0,1). (default: 0.0)
        concat_static: bool
            If True, uses standard LSTM otherwise uses EA-LSTM
        """
        super(Model, self).__init__()
        # None sentinel instead of a mutable `[]` default: the old default
        # list object was shared across every instance constructed without
        # the argument (and stored on self, so mutations leaked between
        # instances). Callers that relied on the empty-list default see
        # identical behavior.
        if embedding_hiddens is None:
            embedding_hiddens = []
        self.input_size_dyn = input_size_dyn
        self.input_size_stat = input_size_stat
        self.hidden_size = hidden_size
        self.embedding_hiddens = embedding_hiddens
        self.initial_forget_bias = initial_forget_bias
        self.dropout_rate = dropout
        self.concat_static = concat_static

        if self.concat_static:
            if self.embedding_hiddens:
                # Static features go through an embedding network first.
                self.lstm = EmbLSTM(input_size_dyn=input_size_dyn,
                                    input_size_stat=input_size_stat,
                                    hidden_size=hidden_size,
                                    embedding_hiddens=embedding_hiddens,
                                    initial_forget_bias=initial_forget_bias)
            else:
                # Standard LSTM on the (concatenated) dynamic inputs.
                self.lstm = LSTM(
                    input_size=input_size_dyn,
                    hidden_size=hidden_size,
                    initial_forget_bias=initial_forget_bias)
        else:
            # EA-LSTM: static inputs drive the input gate separately.
            self.lstm = EALSTM(
                input_size_dyn=input_size_dyn,
                input_size_stat=input_size_stat,
                hidden_size=hidden_size,
                initial_forget_bias=initial_forget_bias)

        self.dropout = nn.Dropout(p=dropout)
        # Single-value regression head on the hidden state.
        self.fc = nn.Linear(hidden_size, 1)
# Example #2
    def __init__(self,
                 input_size_dyn: int,
                 hidden_size: int,
                 initial_forget_bias: int = 5,
                 dropout: float = 0.0,
                 concat_static: bool = False,
                 no_static: bool = False):
        """Initialize model.

        Sets up a standard LSTM core followed by a dropout layer and a
        linear head that maps the hidden state to a single output value.
        The `concat_static`/`no_static` flags are recorded on the instance
        for use elsewhere; this variant always builds a plain LSTM.

        Parameters
        ----------
        input_size_dyn: int
            Number of dynamic input features.
        hidden_size: int
            Number of LSTM cells/hidden units.
        initial_forget_bias: int
            Value of the initial forget gate bias. (default: 5)
        dropout: float
            Dropout probability in range(0,1). (default: 0.0)
        concat_static: bool
            If True, uses standard LSTM otherwise uses EA-LSTM
        no_static: bool
            If True, runs standard LSTM
        """
        super(Model, self).__init__()

        # Record the full configuration on the instance.
        self.input_size_dyn = input_size_dyn
        self.hidden_size = hidden_size
        self.initial_forget_bias = initial_forget_bias
        self.dropout_rate = dropout
        self.concat_static = concat_static
        self.no_static = no_static

        # Recurrent core, regularization, and single-output head.
        # Submodules are assigned in the same order as before so parameter
        # registration order is unchanged.
        self.lstm = LSTM(
            input_size=input_size_dyn,
            hidden_size=hidden_size,
            initial_forget_bias=initial_forget_bias,
        )
        self.dropout = nn.Dropout(p=dropout)
        self.fc = nn.Linear(hidden_size, 1)