from __future__ import annotations

from configparser import ConfigParser
from pathlib import Path

import tensorflow as tf
import tensorflow_addons as tfa


def train_model_2(config: ConfigParser, model: tf.keras.Sequential,
                  data: Data, save_path: Path,
                  checkpoint_path: Path) -> None:
    """Train `model` with SWA weight averaging and save the result."""
    # `Data` is assumed to be the project's own dataset wrapper, defined
    # elsewhere; it must expose .training_dataset and .validation_dataset.
    version = config['Model']['version']
    callbacks = [
        # Checkpoint the averaged (SWA) weights rather than the raw ones.
        tfa.callbacks.AverageModelCheckpoint(
            filepath=str(checkpoint_path / 'cp-{epoch:04d}.ckpt'),
            update_weights=True),
        tf.keras.callbacks.TensorBoard(
            log_dir=f'logs/{version}_model_2',
            profile_batch='100, 110',
            histogram_freq=1,
            update_freq='batch'),
    ]
    optimizer = tf.keras.optimizers.SGD(
        learning_rate=float(config['Model']['learning_rate']))
    # start_averaging=35 was obtained by inspecting, in TensorBoard, the
    # epoch at which convergence occurred on the validation set.
    optimizer = tfa.optimizers.SWA(
        optimizer,
        start_averaging=35,
        average_period=int(config['Model']['n_models']))
    model.compile(
        optimizer=optimizer,
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
        metrics=['accuracy'])
    model.fit(data.training_dataset,
              epochs=1000,
              validation_data=data.validation_dataset,
              callbacks=callbacks)
    # Save the model.
    model.save(save_path)
    # Remove the model from memory, since OOM might otherwise occur.
    del model
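
# A minimal usage sketch for train_model_2. Everything below is an
# illustrative assumption, not part of the original module: the [Model]
# values, the one-layer classifier, and the `Data(...)` call (the real
# constructor arguments are project-specific).
def _example_train_model_2() -> None:
    config = ConfigParser()
    config['Model'] = {'version': 'v1',
                       'learning_rate': '0.01',
                       'n_models': '5'}
    model = tf.keras.Sequential(
        [tf.keras.layers.Dense(10, activation='softmax')])
    data = Data(...)  # hypothetical arguments; see comment above
    train_model_2(config, model, data,
                  save_path=Path('models/model_2'),
                  checkpoint_path=Path('checkpoints/model_2'))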

def compile_model(model: tf.keras.Sequential, lr: float = 0.001,
                  optim: str = 'sgd') -> None:
    """Compile `model` in place with the chosen optimizer."""
    loss = tf.keras.losses.SparseCategoricalCrossentropy()
    metrics = [tf.keras.metrics.SparseCategoricalAccuracy()]
    if optim == 'sgd':
        model.compile(tf.keras.optimizers.SGD(learning_rate=lr),
                      loss=loss, metrics=metrics)
    elif optim == 'adam':
        model.compile(tf.keras.optimizers.Adam(learning_rate=lr),
                      loss=loss, metrics=metrics)
    elif optim == 'rmsprop':
        model.compile(tf.keras.optimizers.RMSprop(learning_rate=lr),
                      loss=loss, metrics=metrics)
    else:
        raise ValueError(
            "Parameter `optim` accepts {'sgd', 'adam', 'rmsprop'}, "
            f"got {optim}")
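
# Usage sketch for compile_model. The two-layer classifier below is an
# illustrative assumption, not part of the original code.
def _example_compile_model() -> None:
    clf = tf.keras.Sequential([
        tf.keras.layers.Dense(32, activation='relu', input_shape=(16,)),
        tf.keras.layers.Dense(10, activation='softmax'),
    ])
    compile_model(clf, lr=0.01, optim='adam')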

def compile_model_with_settings(
        tf_model: tf.keras.Sequential,
        settings: dict,
        loss_func: tf.keras.losses.Loss) -> tf.keras.Sequential:
    """Compile `tf_model` with `loss_func` plus the keyword arguments in
    `settings` (e.g. optimizer, metrics), print a summary, and return it."""
    tf_model.compile(loss=loss_func, **settings)
    tf_model.summary()
    return tf_model
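
# Usage sketch for compile_model_with_settings: `settings` is forwarded
# verbatim to Model.compile, so it can carry the optimizer, metrics, and
# any other compile() keyword. The values below are illustrative assumptions.
def _example_compile_model_with_settings() -> tf.keras.Sequential:
    settings = {
        'optimizer': tf.keras.optimizers.Adam(learning_rate=0.001),
        'metrics': [tf.keras.metrics.SparseCategoricalAccuracy()],
    }
    model = tf.keras.Sequential(
        [tf.keras.layers.Dense(10, activation='softmax')])
    return compile_model_with_settings(
        model, settings, tf.keras.losses.SparseCategoricalCrossentropy())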