    def __init__(self, args):
        super().__init__(args)
        # One dataloader per split.
        self.train_loader, self.val_loader, self.test_loader = get_dataloader(args)
        self.model, self.para_model = prepare_model(args)
        self.optimizer, self.lr_scheduler = prepare_optimizer(self.model, args)
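# Returning `para_model` alongside `model` suggests a data-parallel replica
# used for forward passes while the optimizer tracks the base model's
# parameters. A minimal sketch of such a `prepare_model` (hypothetical
# stand-in; the real helper and backbone live elsewhere in the repo, and
# `args.feat_dim` / `args.num_class` are assumed attribute names):
import torch
import torch.nn as nn

def prepare_model_sketch(args):
    model = nn.Linear(args.feat_dim, args.num_class)  # stand-in backbone
    if torch.cuda.is_available():
        model = model.cuda()
        # DataParallel shares the base model's parameters, so stepping the
        # optimizer on `model` also updates `para_model`.
        para_model = nn.DataParallel(model)
    else:
        para_model = model
    return model, para_model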
    def __init__(self, args):
        super().__init__(args)
        # Training uses a loader; the val/test splits are returned as datasets.
        self.train_loader, self.valset, self.testset = get_dataloader(args)
        init_summary_writer(args.filename)
        self.model, self.para_model = prepare_model(args)
        # Optional gradient debugging: register a hook on every parameter.
        # for n, p in self.para_model.named_parameters():
        #     p.register_hook(save_grad(n))
        self.optimizer, self.lr_scheduler = prepare_optimizer(self.model, args)
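# The commented-out loop above refers to a `save_grad` helper that is not
# shown in this file. Under the usual PyTorch pattern, it returns a closure
# that stashes each parameter's gradient in a dict keyed by name; a minimal
# sketch of such a helper (an assumption, not this repo's implementation):
grads = {}

def save_grad(name):
    # Returns a hook that records the gradient flowing into the tensor
    # registered under `name`, for later inspection.
    def hook(grad):
        grads[name] = grad.detach().clone()
    return hook

# Usage, mirroring the commented-out registration loop:
#     for n, p in self.para_model.named_parameters():
#         p.register_hook(save_grad(n))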
    def __init__(self, args):
        super().__init__(args)
        (self.trainset, self.valset, self.trainvalset,
         self.testset, self.traintestset,
         self.train_fsl_loader, self.train_gfsl_loader,
         self.val_fsl_loader, self.val_gfsl_loader,
         self.test_fsl_loader, self.test_gfsl_loader) = get_dataloader(args)

        # Paired FSL/GFSL loaders must yield the same number of batches,
        # since they are consumed together.
        assert len(self.train_gfsl_loader) == len(self.train_fsl_loader)
        if self.val_gfsl_loader is not None:
            assert len(self.val_gfsl_loader) == len(self.val_fsl_loader)
        if self.test_gfsl_loader is not None:
            assert len(self.test_gfsl_loader) == len(self.test_fsl_loader)

        self.model = prepare_model(args)
        self.optimizer, self.lr_scheduler = prepare_optimizer(
            self.model, args, len(self.train_gfsl_loader))
        self.max_steps = len(self.train_fsl_loader) * args.max_epoch
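# The equal-length assertions above make sense if the FSL and GFSL loaders
# are consumed in lockstep, one batch from each per step. A sketch of that
# pattern (hypothetical training-loop excerpt; `compute_joint_loss` is an
# assumed placeholder, not a function from this repo):
def train_sketch(self, args):
    for epoch in range(args.max_epoch):
        # zip() silently truncates to the shorter iterable, which is why
        # __init__ asserts that both loaders yield the same batch count.
        for fsl_batch, gfsl_batch in zip(self.train_fsl_loader,
                                         self.train_gfsl_loader):
            loss = compute_joint_loss(self, fsl_batch, gfsl_batch)
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()
        self.lr_scheduler.step()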