def _build_model(self, x, y): """Construct the predictive model using feature and label statistics. Args: - x: temporal feature - y: labels Returns: - model: predictor model """ # Parameters dim = len(x[0, 0, :]) max_seq_len = len(x[0, :, 0]) model = tf.keras.Sequential() model.add( layers.Masking(mask_value=-1., input_shape=(max_seq_len, dim))) # Stack multiple layers for _ in range(self.n_layer - 1): model = rnn_sequential(model, self.model_type, self.h_dim, return_seq=True) dim_y = len(y.shape) if dim_y == 2: return_seq_bool = False elif dim_y == 3: return_seq_bool = True else: raise ValueError('Dimension of y {} is not 2 or 3.'.format( str(dim_y))) model = rnn_sequential(model, self.model_type, self.h_dim, return_seq_bool, name='intermediate_state') self.adam = tf.keras.optimizers.Adam(learning_rate=self.learning_rate, beta_1=0.9, beta_2=0.999, amsgrad=False) if self.task == 'classification': if dim_y == 3: model.add( layers.TimeDistributed( layers.Dense(y.shape[-1], activation='sigmoid'))) elif dim_y == 2: model.add(layers.Dense(y.shape[-1], activation='sigmoid')) model.compile(loss=binary_cross_entropy_loss, optimizer=self.adam) elif self.task == 'regression': if dim_y == 3: model.add( layers.TimeDistributed( layers.Dense(y.shape[-1], activation='linear'))) elif dim_y == 2: model.add(layers.Dense(y.shape[-1], activation='linear')) model.compile(loss=mse_loss, optimizer=self.adam, metrics=['mse']) return model
def _build_model(self, x, y): """Construct the transfer learning model using feature and label stats. Args: - x: temporal feature - y: labels Returns: - model: transfer learning model """ # Parameters dim_y = len(y.shape) # Model initialization model = tf.keras.Sequential() adam = tf.keras.optimizers.Adam(learning_rate=self.learning_rate, beta_1=0.9, beta_2=0.999, amsgrad=False) # For one-shot prediction, use MLP if dim_y == 2: for _ in range(self.n_layer - 1): model.add(layers.Dense(self.h_dim, activation='sigmoid')) # For online prediction, use time-series model elif dim_y == 3: for _ in range(self.n_layer - 1): model = rnn_sequential(model, self.model_type, self.h_dim, return_seq=True) # For classification if self.task == 'classification': if dim_y == 3: model.add( layers.TimeDistributed( layers.Dense(y.shape[-1], activation='sigmoid'))) elif dim_y == 2: model.add(layers.Dense(y.shape[-1], activation='sigmoid')) model.compile(loss=binary_cross_entropy_loss, optimizer=adam) # For regression elif self.task == 'regression': if dim_y == 3: model.add( layers.TimeDistributed( layers.Dense(y.shape[-1], activation='linear'))) elif dim_y == 2: model.add(layers.Dense(y.shape[-1], activation='linear')) model.compile(loss=mse_loss, optimizer=adam, metrics=['mse']) return model
def _build_model(self, x, y): """Construct the model using feature and label statistics. Args: - x: features - y: labels Returns: - model: predictor model """ # Parameters h_dim = self.h_dim n_layer = self.n_layer dim = len(x[0, 0, :]) max_seq_len = len(x[0, :, 0]) model = tf.keras.Sequential() model.add(layers.Masking(mask_value=0., input_shape=(max_seq_len, dim))) for _ in range(n_layer - 1): model = rnn_sequential(model, self.model_type, h_dim, return_seq=True) model = rnn_sequential(model, self.model_type, h_dim, return_seq=False) adam = tf.keras.optimizers.Adam(learning_rate=self.learning_rate, beta_1=0.9, beta_2=0.999, amsgrad=False) if self.task == 'classification': model.add(layers.Dense(y.shape[-1], activation='sigmoid')) model.compile(loss=binary_cross_entropy_loss, optimizer=adam) elif self.task == 'regression': model.add(layers.Dense(y.shape[-1], activation='linear')) model.compile(loss=mse_loss, optimizer=adam, metrics=['mse']) return model