```python
import torch
from torch.utils.tensorboard import SummaryWriter

# Create a SummaryWriter object with the log_dir argument
writer = SummaryWriter(log_dir='./logs')

# Get the log directory
log_dir = writer.get_logdir()
print(f'The log directory is {log_dir}')
```
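For reference, a minimal sketch of the default behavior when no `log_dir` is passed: `SummaryWriter` then falls back to an auto-generated, timestamped directory under `runs/`, and `get_logdir()` reports whichever directory is actually in use (the exact directory name shown in the comment is illustrative).

```python
from torch.utils.tensorboard import SummaryWriter

# With no log_dir argument, SummaryWriter creates an auto-generated
# directory (typically runs/<datetime>_<hostname>); get_logdir() returns
# whichever directory the writer is using.
default_writer = SummaryWriter()
print(default_writer.get_logdir())  # e.g. runs/May04_10-00-00_myhost
default_writer.close()
```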
```python
import torch
from torch.utils.tensorboard import SummaryWriter

# Create a SummaryWriter object
writer = SummaryWriter()

# Add scalar values
for i in range(10):
    writer.add_scalar('loss', i*0.1, i)
    writer.add_scalar('accuracy', i*0.2, i)

# Close the writer
writer.close()
```

In this example, we created a `SummaryWriter` object and added scalar values for `loss` and `accuracy` at each epoch. The `add_scalar()` method accepts three arguments: the name (tag) of the scalar, the scalar value, and the global step (i.e., the epoch).

Package/Library: `torch.utils.tensorboard`
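As a sketch of how these calls typically sit inside a training loop, the snippet below logs a real computed loss instead of the placeholder values above; the linear model, optimizer, and synthetic data are illustrative assumptions, not part of the original example.

```python
import torch
from torch import nn
from torch.utils.tensorboard import SummaryWriter

# Illustrative model, optimizer, and synthetic data (assumptions for this sketch)
model = nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
criterion = nn.MSELoss()
inputs = torch.randn(64, 4)
targets = torch.randn(64, 1)

writer = SummaryWriter(log_dir='./logs')

for epoch in range(10):
    optimizer.zero_grad()
    loss = criterion(model(inputs), targets)
    loss.backward()
    optimizer.step()

    # Tag, scalar value, global step: here the epoch index serves as the step
    writer.add_scalar('loss', loss.item(), epoch)

writer.close()
```

The resulting logs can then be visualized by pointing TensorBoard at the same directory, e.g. `tensorboard --logdir ./logs`.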