def log_epoch_stats(self, cur_epoch, writer, params=0, flops=0, is_master=False):
    # Gather the accumulated stats for the epoch and log them as JSON
    stats = self.get_epoch_stats(cur_epoch)
    lu.log_json_stats(
        stats,
        cur_epoch,
        writer,
        is_epoch=False,
        params=params,
        flops=flops,
        is_master=is_master,
    )
def compute_precise_time(model, loss_fun):
    """Computes precise time."""
    # Generate a dummy mini-batch
    im_size = cfg.TRAIN.IM_SIZE
    inputs = torch.rand(cfg.PREC_TIME.BATCH_SIZE, 3, im_size, im_size)
    labels = torch.zeros(cfg.PREC_TIME.BATCH_SIZE, dtype=torch.int64)
    # Copy the data to the GPU
    inputs = inputs.cuda(non_blocking=False)
    labels = labels.cuda(non_blocking=False)
    # Compute precise time
    fw_test_time = compute_fw_test_time(model, inputs)
    fw_time, bw_time = compute_fw_bw_time(model, loss_fun, inputs, labels)
    # Log precise time
    lu.log_json_stats(
        {
            "prec_test_fw_time": fw_test_time,
            "prec_train_fw_time": fw_time,
            "prec_train_bw_time": bw_time,
            "prec_train_fw_bw_time": fw_time + bw_time,
        }
    )
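
# The two timing helpers called above are not shown here. The following is a
# minimal, hypothetical sketch of what they might look like, assuming a warmup
# phase of cfg.PREC_TIME.WARMUP_ITER iterations, cfg.PREC_TIME.NUM_ITER timed
# iterations, and `import time` alongside the existing torch import. The names
# match the calls above, but the bodies are an illustration, not the project's
# actual implementation.


@torch.no_grad()
def compute_fw_test_time(model, inputs):
    """Sketch: average eval-mode forward time per mini-batch, in seconds."""
    model.eval()
    # Warm up the GPU before timing
    for _ in range(cfg.PREC_TIME.WARMUP_ITER):
        model(inputs)
    torch.cuda.synchronize()
    start = time.time()
    for _ in range(cfg.PREC_TIME.NUM_ITER):
        model(inputs)
    torch.cuda.synchronize()
    return (time.time() - start) / cfg.PREC_TIME.NUM_ITER


def compute_fw_bw_time(model, loss_fun, inputs, labels):
    """Sketch: average train-mode forward and backward times per mini-batch."""
    model.train()
    fw_total, bw_total = 0.0, 0.0
    for cur_iter in range(cfg.PREC_TIME.WARMUP_ITER + cfg.PREC_TIME.NUM_ITER):
        # Time the forward pass (including the loss computation)
        torch.cuda.synchronize()
        fw_start = time.time()
        preds = model(inputs)
        loss = loss_fun(preds, labels)
        torch.cuda.synchronize()
        fw_end = time.time()
        # Time the backward pass
        loss.backward()
        torch.cuda.synchronize()
        bw_end = time.time()
        # Skip the warmup iterations when accumulating
        if cur_iter >= cfg.PREC_TIME.WARMUP_ITER:
            fw_total += fw_end - fw_start
            bw_total += bw_end - fw_end
    return fw_total / cfg.PREC_TIME.NUM_ITER, bw_total / cfg.PREC_TIME.NUM_ITER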
def log_epoch_stats(self, cur_epoch):
    # Gather the accumulated stats for the epoch and log them as JSON
    stats = self.get_epoch_stats(cur_epoch)
    lu.log_json_stats(stats)
def log_iter_stats(self, cur_epoch, cur_iter):
    # Only log every cfg.LOG_PERIOD iterations
    if (cur_iter + 1) % cfg.LOG_PERIOD != 0:
        return
    stats = self.get_iter_stats(cur_epoch, cur_iter)
    lu.log_json_stats(stats)
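
# A hypothetical usage sketch showing how a training loop would typically drive
# the meter logging above: per-iteration stats are logged every cfg.LOG_PERIOD
# iterations and the accumulated stats are logged once per epoch. The
# train_meter methods used here (iter_tic, iter_toc, update_stats, reset) and
# the loop signature are illustrative assumptions, not the project's trainer.


def train_epoch(train_loader, model, loss_fun, optimizer, train_meter, cur_epoch):
    """Sketch of one training epoch with per-iteration and per-epoch logging."""
    model.train()
    train_meter.iter_tic()
    for cur_iter, (inputs, labels) in enumerate(train_loader):
        inputs, labels = inputs.cuda(), labels.cuda(non_blocking=True)
        preds = model(inputs)
        loss = loss_fun(preds, labels)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        train_meter.iter_toc()
        # Record this iteration's stats, then log every cfg.LOG_PERIOD iterations
        train_meter.update_stats(loss.item(), labels.size(0))
        train_meter.log_iter_stats(cur_epoch, cur_iter)
        train_meter.iter_tic()
    # Log the accumulated stats once per epoch and reset the meter
    train_meter.log_epoch_stats(cur_epoch)
    train_meter.reset()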