def setup(self, config):
    """Create and register the model, optimizer, and data loaders.

    Registers the model with ``find_unused_parameters=True`` passed
    through to DistributedDataParallel via ``ddp_args``, then sanity-
    checks that the flag reached the wrapped model.

    Args:
        config (dict): trainer configuration forwarded to the
            ``model_creator`` / ``optimizer_creator`` / ``data_creator``
            factories.
    """
    model = model_creator(config)
    optimizer = optimizer_creator(model, config)
    train_loader, val_loader = data_creator(config)
    # Stray trailing comma removed from the unpacking target; register()
    # returns exactly (model, optimizer) here.
    self.model, self.optimizer = self.register(
        models=model,
        optimizers=optimizer,
        ddp_args={"find_unused_parameters": True})
    # Previously the loaders were created but silently discarded; register
    # them so the trainer can actually drive training/validation epochs,
    # consistent with the other setup() variant in this file.
    self.register_data(train_loader=train_loader,
                       validation_loader=val_loader)
    # Verify the ddp_args kwarg was propagated onto the DDP wrapper.
    assert self.model.find_unused_parameters
def __init__(self, config):
    """Initialize the module from *config*.

    If the config carries a prebuilt ``"layer"``, a deep copy of it is
    used (so instances never share parameters); otherwise a fresh layer
    is built via ``model_creator``. A random int in [0, 10) is stored to
    distinguish instances.

    Args:
        config (dict): configuration; may contain a ``"layer"`` entry.
    """
    super().__init__()
    self.config = config
    self.layer = (copy.deepcopy(config["layer"])
                  if "layer" in config
                  else model_creator(self.config))
    self.rand_int = np.random.randint(10)
def setup(self, config):
    """Build and register all training components.

    Constructs the model, optimizer, data loaders, LR scheduler, and an
    MSE loss from the config, registers them with the operator, and then
    registers the train/validation loaders.

    Args:
        config (dict): trainer configuration forwarded to the component
            creator functions.
    """
    model = model_creator(config)
    optimizer = optimizer_creator(model, config)
    scheduler = scheduler_creator(optimizer, config)
    criterion = nn.MSELoss()
    train_loader, val_loader = data_creator(config)

    registered = self.register(
        models=model,
        optimizers=optimizer,
        criterion=criterion,
        schedulers=scheduler,
    )
    self.model, self.optimizer, self.criterion, self.scheduler = registered

    self.register_data(
        train_loader=train_loader,
        validation_loader=val_loader,
    )