train_hist.add(logs=train_logs, epoch=epoch + 1)
epoch_time_end = time.time()

print('Epoch {}:\tAverage Loss: {:.4f}\tAverage Accuracy: {:.3f}\tEpoch Time: {:.3f} hours'.format(
    epoch + 1,
    avg_loss_train,
    avg_acc_train,
    (epoch_time_end - epoch_time_start) / 3600,
))

# Step the scheduler on the validation loss (plateau-style scheduling).
if lr_scheduler is not None:
    lr_scheduler.step(validation_logs['loss'])

# Overwrite the "last" checkpoints from the previous epoch.
if epoch > 0:
    ckpter.last_delete_and_save(epoch=epoch, monitor='acc', loss_acc=validation_logs)
    ckpter_lr.last_delete_and_save(epoch=epoch, monitor='acc', loss_acc=validation_logs)
    ckpter_auc.last_delete_and_save(epoch=epoch, monitor='auc', loss_acc=validation_logs)
    ckpter_auc_lr.last_delete_and_save(epoch=epoch, monitor='auc', loss_acc=validation_logs)

# Update the "best" checkpoints if the monitored validation metric improved.
ckpter.check_on(epoch=epoch, monitor='acc', loss_acc=validation_logs)
ckpter_lr.check_on(epoch=epoch, monitor='acc', loss_acc=validation_logs)
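# Note: lr_scheduler.step() is called above with the validation loss, which matches the
# signature of torch.optim.lr_scheduler.ReduceLROnPlateau. A minimal sketch of how such a
# scheduler could be set up is shown here; the placeholder model, optimizer, factor, and
# patience values are illustrative assumptions, not taken from this project.
import torch.nn as nn
import torch.optim as optim

model = nn.Linear(128, 2)                       # placeholder model for the sketch
optimizer = optim.SGD(model.parameters(), lr=0.1)
lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(
    optimizer, mode='min', factor=0.1, patience=5)

# At the end of every epoch the training loop would then call:
# lr_scheduler.step(validation_logs['loss'])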
train_logs = {
    'loss': avg_triplet_loss,
    'acc': np.mean(Ptp01),
    'loss_all_avg': loss_all_avg,
    'acc001': np.mean(Ptp001),
    'nonzeros': nonzeros,
}
train_hist.add(logs=train_logs, epoch=epoch + 1)
epoch_time_end = time.time()

# --------------------------------------------------------------------------------------
# Save last model parameters and check if it is the best
# --------------------------------------------------------------------------------------
if epoch > 0:
    ckpter.last_delete_and_save(epoch=epoch, monitor='acc', loss_acc=validation_logs)
    ckpter_v2.last_delete_and_save(epoch=epoch, monitor='acc001', loss_acc=validation_logs)

# Only update the "best" checkpoints when the epoch produced valid triplets.
if num_triplets:
    ckpter.check_on(epoch=epoch, monitor='acc', loss_acc=validation_logs)
    ckpter_v2.check_on(epoch=epoch, monitor='acc001', loss_acc=validation_logs)

print('Epoch {}:\tAverage Triplet Loss: {:.3f}\tEpoch Time: {:.3f} hours\t'
      'Number of valid training triplets in epoch: {}'.format(
          epoch + 1,
          avg_triplet_loss,
          (epoch_time_end - epoch_time_start) / 3600,
          num_triplets))
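# The ckpter objects used above expose last_delete_and_save() and check_on(), whose
# implementation is not shown in this section. Below is a minimal sketch of such a
# checkpointer, assuming a PyTorch model, a plain directory layout, and higher-is-better
# metrics ('acc', 'acc001', 'auc'). The class name, constructor arguments, and file naming
# scheme are illustrative assumptions and may differ from the project's actual helper.
import os
import glob
import torch


class CheckPointer:
    def __init__(self, model, save_dir, prefix='model'):
        self.model = model
        self.save_dir = save_dir
        self.prefix = prefix
        self.best_value = None
        os.makedirs(save_dir, exist_ok=True)

    def _save(self, tag, epoch, monitor, loss_acc):
        # File name encodes the tag ('last'/'best'), epoch, and monitored metric value.
        path = os.path.join(
            self.save_dir,
            '{}_{}_epoch{}_{}{:.4f}.pth'.format(self.prefix, tag, epoch,
                                                monitor, loss_acc[monitor]))
        torch.save({'epoch': epoch,
                    'state_dict': self.model.state_dict(),
                    'logs': loss_acc}, path)

    def last_delete_and_save(self, epoch, monitor, loss_acc):
        # Keep a single "last" checkpoint: delete the previous one, then save the new one.
        for old in glob.glob(os.path.join(self.save_dir,
                                          '{}_last_*.pth'.format(self.prefix))):
            os.remove(old)
        self._save('last', epoch, monitor, loss_acc)

    def check_on(self, epoch, monitor, loss_acc):
        # Save a "best" checkpoint whenever the monitored metric improves.
        value = loss_acc[monitor]
        if self.best_value is None or value > self.best_value:
            self.best_value = value
            self._save('best', epoch, monitor, loss_acc)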