Example #1
    def _build_model(self, x, y):
        """Construct the model using feature and label statistics.
        
        Args:
            - x: temporal feature
            - y: labels
            
        Returns:
            - model: interpretation model
        """
        # Parameters
        dim = len(x[0, 0, :])
        max_seq_len = len(x[0, :, 0])

        # Build a model with one input and two outputs
        main_input = Input(shape=(max_seq_len, dim), dtype="float32")
        mask_layer = Masking(mask_value=-1.0)(main_input)

        select_layer = rnn_layer(mask_layer, self.model_type, self.h_dim, return_seq=True)

        for _ in range(self.n_layer):
            select_layer = rnn_layer(select_layer, self.model_type, self.h_dim, return_seq=True)

        select_layer = TimeDistributed(Dense(dim, activation="sigmoid"))(select_layer)

        # Soft thresholding: selection probabilities below 0.5 are zeroed and
        # the remainder rescaled, giving per-feature selection scores in [0, 1)
        select_layer = Lambda(lambda x: x - 0.5)(select_layer)
        select_layer = Activation("relu")(select_layer)
        select_out = Lambda(lambda x: x * 2, name="select")(select_layer)

        # Second output
        pred_layer = Multiply()([mask_layer, select_out])

        for _ in range(self.n_layer - 1):
            pred_layer = rnn_layer(pred_layer, self.model_type, self.h_dim, return_seq=True)

        return_seq_bool = len(y.shape) == 3
        pred_layer = rnn_layer(pred_layer, self.model_type, self.h_dim, return_seq_bool)

        if self.task == "classification":
            act_fn = "sigmoid"
        elif self.task == "regression":
            act_fn = "linear"

        if len(y.shape) == 3:
            pred_out = TimeDistributed(Dense(y.shape[-1], activation=act_fn), name="predict")(pred_layer)
        elif len(y.shape) == 2:
            pred_out = Dense(y.shape[-1], activation=act_fn, name="predict")(pred_layer)

        model = Model(inputs=main_input, outputs=[select_out, pred_out])

        adam = tf.keras.optimizers.Adam(learning_rate=self.learning_rate, beta_1=0.9, beta_2=0.999, amsgrad=False)

        model.compile(
            loss={"select": select_loss, "predict": rmse_loss},
            optimizer=adam,
            loss_weights={"select": 0.01, "predict": 1},
        )

        return model
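All three examples call an rnn_layer helper that is not shown on this page. The sketch below is a minimal reconstruction inferred only from how it is called (input tensor, model-type string, hidden dimension, return_seq flag); the type names "gru", "lstm" and "rnn" and the exact signature are assumptions, and the helper shipped with the original project may differ.

# Minimal sketch of the assumed rnn_layer helper: pick a Keras recurrent
# layer by name and apply it to the previous layer's output.
from tensorflow.keras.layers import GRU, LSTM, SimpleRNN


def rnn_layer(prev_layer, model_type, h_dim, return_seq):
    """Apply one recurrent layer of the requested type (type names assumed)."""
    if model_type == "gru":
        return GRU(h_dim, return_sequences=return_seq)(prev_layer)
    if model_type == "lstm":
        return LSTM(h_dim, return_sequences=return_seq)(prev_layer)
    if model_type == "rnn":
        return SimpleRNN(h_dim, return_sequences=return_seq)(prev_layer)
    raise ValueError("Unknown model_type: " + model_type)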
Example #2
    def _build_model(self, x, y):
        """Construct the model using feature and label statistics.
        
        Args:
            - x: temporal feature
            - y: labels
            
        Returns:
            - model: predictor model
        """
        self.model_type = "gru"

        # Only for one-shot prediction
        assert len(y.shape) == 2

        # Parameters
        dim = len(x[0, 0, :])
        seq_len = len(x[0, :, 0])

        self.adam = tf.keras.optimizers.Adam(learning_rate=self.learning_rate,
                                             beta_1=0.9,
                                             beta_2=0.999,
                                             amsgrad=False)

        inputs = Input(shape=(
            seq_len,
            dim,
        ))
        rnn_out = rnn_layer(inputs,
                            self.model_type,
                            self.h_dim,
                            return_seq=True)
        for _ in range(self.n_layer - 1):
            rnn_out = rnn_layer(rnn_out,
                                self.model_type,
                                self.h_dim,
                                return_seq=True)

        attention_output = self.attention_3d_block(rnn_out)

        if self.task == "classification":
            output = Dense(y.shape[-1], activation="sigmoid",
                           name="output")(attention_output)
            attention_model = Model(inputs=[inputs], outputs=[output])
            attention_model.compile(loss=binary_cross_entropy_loss,
                                    optimizer=self.adam)
        elif self.task == "regression":
            output = Dense(y.shape[-1], activation="linear",
                           name="output")(attention_output)
            attention_model = Model(inputs=[inputs], outputs=[output])
            attention_model.compile(loss=mse_loss,
                                    optimizer=self.adam,
                                    metrics=["mse"])

        return attention_model
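A hypothetical way to exercise this predictor on synthetic data is shown below. The Predictor wrapper class, its constructor arguments and the data shapes are assumptions introduced for illustration; only the requirement that y is 2-D (one-shot prediction) comes from the code above.

import numpy as np

# Synthetic data: x is (samples, seq_len, dim); y is 2-D because the model
# above asserts one-shot prediction per sequence.
x = np.random.rand(32, 10, 5).astype("float32")
y = np.random.rand(32, 1).astype("float32")

# Hypothetical wrapper assumed to define h_dim, n_layer, task, learning_rate
# and the attention_3d_block method used inside _build_model.
predictor = Predictor(h_dim=16, n_layer=2, task="regression", learning_rate=0.001)
model = predictor._build_model(x, y)

model.fit(x, y, batch_size=16, epochs=5, verbose=0)
y_hat = model.predict(x)  # shape (32, 1)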
Example #3
    def _build_model(self, x, y):
        """Construct ASAC model using feature and label statistics.
    
    Args:
      - x: temporal feature
      - y: labels
      
    Returns:
      - model: asac model
    """

        # Parameters
        h_dim = self.h_dim
        n_layer = self.n_layer
        dim = len(x[0, 0, :])
        max_seq_len = len(x[0, :, 0])

        # Build a model with one input and two outputs
        main_input = Input(shape=(max_seq_len, dim), dtype='float32')
        mask_layer = Masking(mask_value=-1.)(main_input)
        previous_input = Input(shape=(max_seq_len, dim), dtype='float32')
        previous_mask_layer = Masking(mask_value=-1.)(previous_input)

        select_layer = rnn_layer(previous_mask_layer,
                                 self.model_type,
                                 h_dim,
                                 return_seq=True)
        for _ in range(n_layer):
            select_layer = rnn_layer(select_layer,
                                     self.model_type,
                                     h_dim,
                                     return_seq=True)
        select_layer = TimeDistributed(Dense(
            dim, activation='sigmoid'))(select_layer)

        # Sampling the selection
        select_layer = Lambda(lambda x: x - 0.5)(select_layer)
        select_layer = Activation('relu')(select_layer)
        select_out = Lambda(lambda x: x * 2, name='select')(select_layer)

        # Second output
        pred_layer = Multiply()([mask_layer, select_out])

        for _ in range(n_layer - 1):
            pred_layer = rnn_layer(pred_layer,
                                   self.model_type,
                                   h_dim,
                                   return_seq=True)

        return_seq_bool = len(y.shape) == 3
        pred_layer = rnn_layer(pred_layer, self.model_type, h_dim,
                               return_seq_bool)

        if self.task == 'classification':
            act_fn = 'sigmoid'
        elif self.task == 'regression':
            act_fn = 'linear'

        if len(y.shape) == 3:
            pred_out = TimeDistributed(Dense(y.shape[-1], activation=act_fn),
                                       name='predict')(pred_layer)
        elif len(y.shape) == 2:
            pred_out = Dense(y.shape[-1], activation=act_fn,
                             name='predict')(pred_layer)

        model = Model(inputs=[main_input, previous_input],
                      outputs=[select_out, pred_out])
        # Optimizer
        adam = tf.keras.optimizers.Adam(learning_rate=self.learning_rate,
                                        beta_1=0.9,
                                        beta_2=0.999,
                                        amsgrad=False)
        # Model compile
        if self.task == 'classification':
            model.compile(loss={
                'select': select_loss,
                'predict': binary_cross_entropy_loss
            },
                          optimizer=adam,
                          loss_weights={
                              'select': 0.01,
                              'predict': 1
                          })
        elif self.task == 'regression':
            model.compile(loss={
                'select': select_loss,
                'predict': rmse_loss
            },
                          optimizer=adam,
                          loss_weights={
                              'select': 0.01,
                              'predict': 1
                          })

        return model
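A hypothetical end-to-end call of the ASAC model on synthetic padded data is sketched below. The Asac wrapper class and the way previous_input is constructed (a one-step lag of x, padded with the mask value) are assumptions; the two-input/two-output structure and the -1.0 mask value follow from the code above.

import numpy as np

# Synthetic sequences; -1.0 is the value masked out by the Masking layers.
x = np.random.rand(16, 20, 7).astype("float32")            # current features
x_prev = np.concatenate(
    [np.full((16, 1, 7), -1.0, dtype="float32"), x[:, :-1, :]], axis=1
)                                                           # assumed one-step lag
y = np.random.rand(16, 20, 1).astype("float32")             # 3-D sequence labels

# Hypothetical wrapper assumed to hold the hyperparameters read inside
# _build_model (h_dim, n_layer, model_type, task, learning_rate).
asac = Asac(h_dim=16, n_layer=2, model_type="gru",
            task="regression", learning_rate=0.001)
model = asac._build_model(x, y)

# Two named outputs: per-feature soft selection scores and per-step predictions.
select_scores, predictions = model.predict([x, x_prev])
print(select_scores.shape)  # (16, 20, 7), values in [0, 1)
print(predictions.shape)    # (16, 20, 1)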