# NOTE(review): this chunk arrived with its newlines collapsed; the code below
# is the same token stream re-flowed into conventional formatting.  The first
# lines are the trailing arguments of a trainer-factory call whose opening
# falls before this chunk -- confirm against the preceding lines of the file.
    model, optimizer, loss_fn, device=device, use_f16=True,
)


@trainer.on(Events.ITERATION_COMPLETED)
def update_scheduler(_engine):
    """Advance the LR scheduler once per training iteration.

    The engine argument is unused (the scheduler keeps its own step count).
    """
    scheduler.step()


# Evaluation engine over the model's outputs.  num_classes=19 presumably
# matches the Cityscapes label set -- TODO confirm against the dataset.
evaluator = create_segmentation_evaluator(
    model,
    device=device,
    num_classes=19,
)

# Render progress bars only on the rank-0 process so distributed workers do
# not produce duplicated terminal output.
if local_rank == 0:
    ProgressBar(persist=False).attach(trainer, ['loss'])
    ProgressBar(persist=False).attach(evaluator)


@trainer.on(Events.EPOCH_COMPLETED)
def evaluate(engine):
    """Run validation over val_loader at the end of every epoch."""
    evaluator.run(val_loader)
    # The body of this rank-0 guard continues beyond this chunk
    # (presumably metric logging -- see the following lines of the file).
    if local_rank == 0:
# NOTE(review): newlines were collapsed in this chunk; re-flowed below with the
# original tokens unchanged.  These first statements are the tail of a
# distillation-trainer factory whose `def` opens before this chunk.
    # Expose smoothed per-iteration loss components as engine metrics so they
    # can be shown in the trainer's progress bar.
    RunningAverage(output_transform=lambda x: x['supervised_loss']) \
        .attach(trainer, 'supervised_loss')
    RunningAverage(output_transform=lambda x: x['distillation_loss']) \
        .attach(trainer, 'distillation_loss')
    return trainer


# Build the distillation trainer: the student is optimized against both the
# ground-truth labels (supervised_loss_fn) and the teacher's predictions
# (distillation_loss_fn).
trainer = create_segmentation_distillation_trainer(student, teacher, optimizer,
                                                   supervised_loss_fn,
                                                   distillation_loss_fn, device)
# NOTE(review): the scheduler object itself is registered as the handler here,
# whereas the sibling script earlier in this file wraps it in a function that
# calls scheduler.step().  This only works if `scheduler` is an ignite
# ParamScheduler-style callable accepting the engine -- confirm; a plain
# torch.optim scheduler would fail at run time.
trainer.add_event_handler(Events.ITERATION_COMPLETED, scheduler)

evaluator = create_segmentation_evaluator(student, device=device, num_classes=19)

# Rank-0 only: progress bars for the training losses and validation metrics,
# avoiding duplicated output under distributed training.
if local_rank == 0:
    ProgressBar(persist=True).attach(
        trainer, ['loss', 'supervised_loss', 'distillation_loss'])
    ProgressBar(persist=True).attach(evaluator, ['miou', 'accuracy'])


@trainer.on(Events.EPOCH_COMPLETED)
def evaluate(engine):
    """Run validation over val_loader at the end of every epoch."""
    evaluator.run(val_loader)
    # This rank-0 suite continues beyond the chunk (presumably logging of the
    # evaluator's miou/accuracy metrics -- verify downstream).
    if local_rank == 0: