def train_model(model: Model, epochs: int, batch_size: int, use_wandb: bool = False,
                dataset: Optional[Dataset] = None) -> Model:
    """Train ``model`` for ``epochs`` epochs and return it.

    Args:
        model: Wrapper exposing ``network`` (for ``summary()``) and ``fit()``.
        epochs: Number of training epochs.
        batch_size: Mini-batch size passed through to ``model.fit``.
        use_wandb: Accepted for interface compatibility.
            NOTE(review): currently unused in this definition — confirm intent.
        dataset: Data to train on. BUGFIX: the original body referenced a
            ``dataset`` name that was never a parameter and not in scope,
            which raised ``NameError`` at call time; it is now an explicit
            keyword parameter (defaulting to ``None`` to keep the original
            positional signature backward-compatible).

    Returns:
        The same ``model`` instance, after fitting.
    """
    callbacks = []
    if EARLY_STOPPING:
        early_stopping = EarlyStopping(monitor="val_loss", min_delta=0.01,
                                       patience=3, verbose=1, mode="auto")
        callbacks.append(early_stopping)

    model.network.summary()

    t = time()
    _history = model.fit(dataset=dataset, batch_size=batch_size,
                         epochs=epochs, callbacks=callbacks)
    # BUGFIX: was "{:2f}" (min-width 2, default 6 decimals); ".2f" — two
    # decimal places — is what a "took N s" message intends.
    print("Training took {:.2f} s".format(time() - t))

    return model
def train_model(model: Model, dataset: Dataset, epochs: int, batch_size: int,
                learning_rate: float, gpu_ind: Optional[int] = None,
                use_wandb: bool = False) -> Model:
    """Train ``model`` on ``dataset`` and return it.

    Args:
        model: Wrapper exposing ``network`` (for ``summary()``) and ``fit()``.
        dataset: Data to train on (passed straight through to ``model.fit``).
        epochs: Number of training epochs.
        batch_size: Mini-batch size.
        learning_rate: Optimizer learning rate, forwarded to ``model.fit``.
        gpu_ind: GPU index to sample utilization from; sampling only happens
            when both ``GPU_UTIL_SAMPLER`` is truthy and this is not ``None``.
        use_wandb: When true, attach a ``WandbCallback`` to log the run.

    Returns:
        The same ``model`` instance, after fitting.
    """
    callbacks = [Metrics()]

    if EARLY_STOPPING:
        early_stopping = EarlyStopping(monitor='val_loss', min_delta=0.01,
                                       patience=3, verbose=1, mode='auto')
        callbacks.append(early_stopping)

    # Sentinel: set only when sampling is enabled, so the post-training
    # report below needs just one None check instead of re-evaluating the
    # GPU_UTIL_SAMPLER / gpu_ind condition.
    gpu_utilization = None
    if GPU_UTIL_SAMPLER and gpu_ind is not None:
        gpu_utilization = GPUUtilizationSampler(gpu_ind)
        callbacks.append(gpu_utilization)

    if use_wandb:
        callbacks.append(WandbCallback())

    model.network.summary()

    t = time()
    # History is unused here (underscore matches the sibling definition's
    # convention); callers inspect the returned model instead.
    _history = model.fit(dataset, batch_size, epochs, learning_rate, callbacks)
    # BUGFIX: was "{:2f}" (min-width 2, default 6 decimals); ".2f" — two
    # decimal places — is what a "took N s" message intends.
    print('Training took {:.2f} s'.format(time() - t))

    if gpu_utilization is not None:
        samples = gpu_utilization.samples
        print(
            f'GPU utilization: {round(np.mean(samples), 2)} +- {round(np.std(samples), 2)}'
        )

    return model