def test_train_models_on_samples_with_x_and_y(self):
    """Model should be able to train using separated x and y values."""
    n_timesteps, n_channels = 100, 2
    n_train, n_val = 5, 3
    batch_size = 20

    # Random train/validation data with one-hot encoded binary labels.
    X_train = np.random.rand(n_train, n_timesteps, n_channels)
    y_train = to_categorical(np.array([0, 0, 1, 1, 1]))
    X_val = np.random.rand(n_val, n_timesteps, n_channels)
    y_val = to_categorical(np.array([0, 1, 1]))

    # Build a single CNN candidate with generated hyperparameters.
    custom_settings = get_default_settings()
    model_type = CNN(X_train.shape, 2, **custom_settings)
    hyperparams = model_type.generate_hyperparameters()
    model = model_type.create_model(**hyperparams)
    models = [(model, hyperparams, "CNN")]

    histories, _, _ = find_architecture.train_models_on_samples(
        X_train, y_train, X_val, y_val, models,
        nr_epochs=1,
        subset_size=10,
        verbose=False,
        outputfile=None,
        early_stopping_patience='auto',
        batch_size=batch_size)

    # One candidate model in -> one training history out.
    assert len(histories) == 1
def test_train_models_on_samples_with_dataset(self):
    """Model should be able to train using a tf.data.Dataset as an input.

    When datasets are passed, the y arguments are None because labels
    travel inside the dataset; subset_size=None disables subsetting,
    which pre-batched datasets do not support.
    """
    num_timesteps = 100
    num_channels = 2
    num_samples_train = 5
    num_samples_val = 3
    batch_size = 20

    # Random train/validation data with one-hot encoded binary labels.
    X_train = np.random.rand(num_samples_train, num_timesteps, num_channels)
    y_train = to_categorical(np.array([0, 0, 1, 1, 1]))
    X_val = np.random.rand(num_samples_val, num_timesteps, num_channels)
    y_val = to_categorical(np.array([0, 1, 1]))

    # Wrap the arrays into batched datasets carrying both x and y.
    data_train = tf.data.Dataset.from_tensor_slices(
        (X_train, y_train)).batch(batch_size)
    data_val = tf.data.Dataset.from_tensor_slices(
        (X_val, y_val)).batch(batch_size)

    # Build a single CNN candidate with generated hyperparameters.
    custom_settings = get_default_settings()
    model_type = CNN(X_train.shape, 2, **custom_settings)
    hyperparams = model_type.generate_hyperparameters()
    model = model_type.create_model(**hyperparams)
    models = [(model, hyperparams, "CNN")]

    histories, val_metrics, val_losses = \
        find_architecture.train_models_on_samples(
            data_train, None, data_val, None, models,
            nr_epochs=1,
            subset_size=None,
            verbose=False,
            outputfile=None,
            early_stopping_patience='auto',
            batch_size=batch_size)

    # BUG FIX: this test previously made no assertions, so it passed as
    # long as training did not raise. Assert one result per candidate
    # model, consistent with the sibling tests.
    assert len(histories) == 1
    assert len(val_metrics) == 1
    assert len(val_losses) == 1
def test_train_models_on_samples_with_generators(self):
    """Model should be able to train using a generator as an input."""
    n_timesteps, n_channels = 100, 2
    n_train, n_val = 5, 3
    batch_size = 20

    # Random train/validation data with one-hot encoded binary labels.
    X_train = np.random.rand(n_train, n_timesteps, n_channels)
    y_train = to_categorical(np.array([0, 0, 1, 1, 1]))
    X_val = np.random.rand(n_val, n_timesteps, n_channels)
    y_val = to_categorical(np.array([0, 1, 1]))

    class DataGenerator(Sequence):
        """Minimal keras Sequence yielding fixed-size (x, y) batches."""

        def __init__(self, x_set, y_set, batch_size):
            self.x, self.y = x_set, y_set
            self.batch_size = batch_size

        def __len__(self):
            # Number of batches, counting a final partial batch.
            return math.ceil(len(self.x) / self.batch_size)

        def __getitem__(self, idx):
            lo = idx * self.batch_size
            hi = lo + self.batch_size
            return self.x[lo:hi], self.y[lo:hi]

    data_train = DataGenerator(X_train, y_train, batch_size)
    data_val = DataGenerator(X_val, y_val, batch_size)

    # Build a single CNN candidate with generated hyperparameters.
    custom_settings = get_default_settings()
    model_type = CNN(X_train.shape, 2, **custom_settings)
    hyperparams = model_type.generate_hyperparameters()
    model = model_type.create_model(**hyperparams)
    models = [(model, hyperparams, "CNN")]

    histories, _, _ = find_architecture.train_models_on_samples(
        data_train, None, data_val, None, models,
        nr_epochs=1,
        subset_size=None,
        verbose=False,
        outputfile=None,
        early_stopping_patience='auto',
        batch_size=batch_size)

    # One candidate model in -> one training history out.
    assert len(histories) == 1