def __init__(self, save_dir: Optional[str] = None) -> None:
    """Set up a TensorBoard event writer on the master process.

    Args:
        save_dir: Directory for TensorBoard event files. Defaults to the
            current directory when ``None``.

    NOTE(review): on non-master processes ``self.writer`` is never
    assigned (preserved from the original) — callers presumably guard
    every writer access with ``is_master()``; confirm.
    """
    if is_master():
        # Imported lazily so non-master processes never need tensorboard.
        from torch.utils import tensorboard

        self._save_dir = Path(save_dir or ".")
        self._save_dir.mkdir(exist_ok=True, parents=True)
        # BUG FIX: previously the raw ``save_dir`` (possibly None) was
        # passed to SummaryWriter; with log_dir=None it silently falls
        # back to its default ``runs/<timestamp>`` directory instead of
        # the directory created above. Pass the resolved path.
        self.writer = tensorboard.SummaryWriter(str(self._save_dir))
        # Record the launch command line for reproducibility.
        self.writer.add_text("exec", ' '.join(get_args()))
def set_iterator(self, iterator: Iterator) -> None:
    """Attach *iterator* for progress reporting.

    On the master process the iterator is wrapped in a tqdm progress
    bar (width ``self._ncols``); every other process keeps the raw
    iterator so only a single bar is printed.
    """
    wrapped = liblog.tqdm(iterator, ncols=self._ncols) if is_master() else iterator
    self.writer = wrapped
def main(cfg: Config):
    """Train an MLP-Mixer on ImageNet as described by ``cfg``.

    Builds the data pipeline, model, optimizer and LR schedule, then
    runs the train/test loop for ``cfg.optim.epochs`` epochs, saving
    checkpoints unless ``cfg.no_save`` is set.
    """
    # Pin this process to the requested GPU, if any.
    if cfg.gpu is not None:
        torch.cuda.set_device(cfg.gpu)
    # Pretty-print the resolved config once (master process only).
    if homura.is_master():
        import rich
        rich.print(cfg)
    vs = DATASET_REGISTRY("imagenet")
    # mixup == 0 disables mixup; otherwise the collate function applies it.
    vs.collate_fn = fast_collate if cfg.data.mixup == 0 else gen_mixup_collate(
        cfg.data.mixup)
    # ImageNet-1k head; droppath is stochastic depth regularization.
    model = MLPMixers(cfg.model.name)(num_classes=1_000,
                                      droppath_rate=cfg.model.droppath_rate)
    # Copy so the registry's default augmentation list is not mutated.
    train_da = vs.default_train_da.copy()
    if cfg.data.autoaugment:
        train_da.append(AutoAugment())
    # RandomErasing runs after normalization (post_norm), or not at all.
    post_da = [RandomErasing()] if cfg.data.random_erasing else None
    # In debug mode shrink both splits to 50 batches for a quick dry run.
    train_loader, test_loader = vs(batch_size=cfg.data.batch_size,
                                   train_da=train_da,
                                   post_norm_train_da=post_da,
                                   train_size=cfg.data.batch_size *
                                   50 if cfg.debug else None,
                                   test_size=cfg.data.batch_size *
                                   50 if cfg.debug else None,
                                   num_workers=12)
    optimizer = homura.optim.AdamW(cfg.optim.lr,
                                   weight_decay=cfg.optim.weight_decay,
                                   multi_tensor=True)
    scheduler = homura.lr_scheduler.CosineAnnealingWithWarmup(
        cfg.optim.epochs,
        multiplier=cfg.optim.multiplier,
        warmup_epochs=cfg.optim.warmup_epochs,
        min_lr=cfg.optim.min_lr)
    with Trainer(model,
                 optimizer,
                 SmoothedCrossEntropy(cfg.optim.label_smoothing),
                 reporters=[reporters.TensorboardReporter(".")],
                 scheduler=scheduler,
                 use_amp=cfg.amp,
                 use_cuda_nonblocking=True,
                 report_accuracy_topk=5,
                 optim_cfg=cfg.optim,
                 debug=cfg.debug,
                 cfg=cfg.model) as trainer:
        for ep in trainer.epoch_range(cfg.optim.epochs):
            trainer.train(train_loader)
            trainer.test(test_loader)
            # NOTE(review): scheduler is both passed to Trainer and stepped
            # manually here — presumably Trainer does not auto-step it;
            # confirm against homura's Trainer to rule out double stepping.
            trainer.scheduler.step()
            if not cfg.no_save:
                # Checkpoint every epoch under outputs/<model-name>/.
                trainer.save(f"outputs/{cfg.model.name}", f"{ep}")
    print(
        f"Max Test Accuracy={max(trainer.reporter.history('accuracy/test')):.3f}"
    )
def save(self, path: str) -> None:
    """Serialize ``self.state_dict()`` to ``<path>/<epoch>.pt``.

    Runs only on the master process; the directory is created if
    missing.

    Args:
        path: Destination directory for the checkpoint file.

    NOTE(review): elsewhere in this file ``main`` calls
    ``trainer.save(dir, name)`` with two arguments — if that is this
    same method, the signatures disagree; confirm which class this
    belongs to.
    """
    if homura.is_master():
        # Idiom fix: don't rebind the ``path: str`` parameter to a Path
        # (it contradicted the annotation); use a distinct local name.
        save_dir = pathlib.Path(path)
        save_dir.mkdir(exist_ok=True, parents=True)
        with (save_dir / f'{self.epoch}.pt').open('wb') as f:
            torch.save(self.state_dict(), f)