def __init__(self, cfg):
    """Build the solver: network, optimizer, loss, and logging artifacts.

    Expects ``cfg`` to carry ``algorithm``, ``schedule`` and ``data``
    sections; the network class is imported dynamically from
    ``model.<algorithm-lowercase>``.
    """
    super(Solver, self).__init__(cfg)
    # NOTE(review): this stores the entire schedule dict, not an epoch
    # number — looks like a missing sub-key; confirm against callers.
    self.init_epoch = self.cfg['schedule']

    # Resolve the network class dynamically from the algorithm name.
    module_name = self.cfg['algorithm'].lower()
    net_class = importlib.import_module('model.' + module_name).Net
    self.model = net_class(num_channels=self.cfg['data']['n_colors'],
                           base_filter=64,
                           args=self.cfg)

    # NOTE(review): 'maek_optimizer' and the 'upsacle' config key are
    # project-wide spellings — kept as-is for compatibility.
    self.optimizer = maek_optimizer(self.cfg['schedule']['optimizer'], cfg,
                                    self.model.parameters())
    self.loss = make_loss(self.cfg['schedule']['loss'])

    # Run name encodes algorithm, scale factor and timestamp.
    self.log_name = self.cfg['algorithm'] + '_' + str(
        self.cfg['data']['upsacle']) + '_' + str(self.timestamp)
    # Persist run artifacts under log/<run-name>/.
    self.writer = SummaryWriter('log/' + str(self.log_name))
    save_net_config(self.log_name, self.model)
    save_yml(cfg, os.path.join('log/' + str(self.log_name), 'config.yml'))
    # NOTE(review): train/val dataset and loader attributes are assumed to
    # come from the Solver base class — confirm.
    save_config(
        self.log_name,
        'Train dataset has {} images and {} batches.'.format(
            len(self.train_dataset), len(self.train_loader)))
    save_config(
        self.log_name,
        'Val dataset has {} images and {} batches.'.format(
            len(self.val_dataset), len(self.val_loader)))
    num_params = sum(param.numel() for param in self.model.parameters())
    save_config(self.log_name, 'Model parameters: ' + str(num_params))
def __init__(self, cfg):
    """Set up model, data loaders (HDF5 patches for VDSR/SRCNN),
    optimizer, LR schedule, loss, and logging for a training run."""
    super(Solver, self).__init__(cfg)
    # NOTE(review): this stores the entire schedule dict, not an epoch
    # number — looks like a missing sub-key; confirm against callers.
    self.init_epoch = self.cfg['schedule']

    # Resolve the network class dynamically, e.g. 'model.vdsr'.
    net_name = self.cfg['algorithm'].lower()
    net = importlib.import_module('model.' + net_name).Net

    # VDSR/SRCNN train from pre-generated HDF5 patch archives instead of
    # the loaders provided elsewhere.
    if self.cfg['algorithm'] in ('VDSR', 'SRCNN'):
        train_dataset = DatasetFromHdf5("data/train.h5")
        self.train_loader = DataLoader(train_dataset,
                                       cfg['data']['batch_size'],
                                       shuffle=False,
                                       num_workers=1)
        val_dataset = DatasetFromHdf5("data/test.h5")
        self.val_loader = DataLoader(val_dataset,
                                     cfg['data']['batch_size'],
                                     shuffle=False,
                                     num_workers=1)

    self.model = net(args=self.cfg)
    # NOTE(review): 'maek_optimizer' and the 'upsacle' config key are
    # project-wide spellings — kept as-is for compatibility.
    self.optimizer = maek_optimizer(self.cfg['schedule']['optimizer'], cfg,
                                    self.model.parameters())
    # Decay epochs are encoded as a '-'-separated string, e.g. '100-200'.
    self.milestones = [int(step)
                       for step in self.cfg['schedule']['decay'].split('-')]
    self.scheduler = lr_scheduler.MultiStepLR(
        self.optimizer,
        self.milestones,
        gamma=self.cfg['schedule']['gamma'],
        last_epoch=-1)
    self.loss = make_loss(self.cfg['schedule']['loss'])

    # Run name encodes algorithm, scale factor and timestamp.
    self.log_name = self.cfg['algorithm'] + '_' + str(
        self.cfg['data']['upsacle']) + '_' + str(self.timestamp)
    # Persist run artifacts under log/<run-name>/.
    self.writer = SummaryWriter('log/' + str(self.log_name))
    save_net_config(self.log_name, self.model)
    save_net_py(self.log_name, net_name)
    save_yml(cfg, os.path.join('log/' + str(self.log_name), 'config.yml'))
    # NOTE(review): self.train_dataset / self.val_dataset are assumed to be
    # provided by the Solver base class — confirm.
    save_config(
        self.log_name,
        'Train dataset has {} images and {} batches.'.format(
            len(self.train_dataset), len(self.train_loader)))
    save_config(
        self.log_name,
        'Val dataset has {} images and {} batches.'.format(
            len(self.val_dataset), len(self.val_loader)))
    num_params = sum(param.numel() for param in self.model.parameters())
    save_config(self.log_name, 'Model parameters: ' + str(num_params))
def __init__(self, cfg):
    """Set up model (with explicit scale factor), data loaders for
    VDSR/SRCNN, optimizer, loss, model summary, and logging."""
    super(Solver, self).__init__(cfg)
    # NOTE(review): this stores the entire schedule dict, not an epoch
    # number — looks like a missing sub-key; confirm against callers.
    self.init_epoch = self.cfg['schedule']

    # Resolve the network class dynamically, e.g. 'model.srcnn'.
    net_name = self.cfg['algorithm'].lower()
    net = importlib.import_module('model.' + net_name).Net

    # VDSR/SRCNN train from pre-generated HDF5 patch archives.
    if self.cfg['algorithm'] in ('VDSR', 'SRCNN'):
        train_dataset = DatasetFromHdf5("data/train.h5")
        self.train_loader = DataLoader(train_dataset,
                                       cfg['data']['batch_size'],
                                       shuffle=False,
                                       num_workers=1)
        val_dataset = DatasetFromHdf5("data/test.h5")
        self.val_loader = DataLoader(val_dataset,
                                     cfg['data']['batch_size'],
                                     shuffle=False,
                                     num_workers=1)

    self.model = net(num_channels=self.cfg['data']['n_colors'],
                     base_filter=64,
                     scale_factor=self.cfg['data']['upsacle'],
                     args=self.cfg)
    # NOTE(review): 'maek_optimizer' and the 'upsacle' config key are
    # project-wide spellings — kept as-is for compatibility.
    self.optimizer = maek_optimizer(self.cfg['schedule']['optimizer'], cfg,
                                    self.model.parameters())
    self.loss = make_loss(self.cfg['schedule']['loss'])

    # Run name encodes algorithm, scale factor and timestamp.
    self.log_name = self.cfg['algorithm'] + '_' + str(
        self.cfg['data']['upsacle']) + '_' + str(self.timestamp)
    # Persist run artifacts under log/<run-name>/.
    self.writer = SummaryWriter('log/' + str(self.log_name))
    # Print a layer-by-layer summary on CPU for a 3-channel patch input.
    summary(self.model,
            (3, self.cfg['data']['patch_size'], self.cfg['data']['patch_size']),
            device='cpu')
    save_net_config(self.log_name, self.model)
    save_net_py(self.log_name, net_name)
    save_yml(cfg, os.path.join('log/' + str(self.log_name), 'config.yml'))
    # NOTE(review): self.train_dataset / self.val_dataset are assumed to be
    # provided by the Solver base class — confirm.
    save_config(
        self.log_name,
        'Train dataset has {} images and {} batches.'.format(
            len(self.train_dataset), len(self.train_loader)))
    save_config(
        self.log_name,
        'Val dataset has {} images and {} batches.'.format(
            len(self.val_dataset), len(self.val_loader)))
    num_params = sum(param.numel() for param in self.model.parameters())
    save_config(self.log_name, 'Model parameters: ' + str(num_params))
def __init__(self, cfg, name):
    """Single-image solver setup: builds the model, loads the one
    reference image called *name*, prepares a fixed noise input,
    optimizer, cycle loss, and logging.
    """
    super(Solver, self).__init__(cfg)
    # NOTE(review): this stores the entire schedule dict, not an epoch
    # number — looks like a missing sub-key; confirm against callers.
    self.init_epoch = self.cfg['schedule']

    # Resolve the network class dynamically from the algorithm name.
    net_name = self.cfg['algorithm'].lower()
    net = importlib.import_module('model.' + net_name).Net
    self.model = net(num_channels=self.cfg['data']['n_colors'],
                     base_filter=64,
                     scale_factor=self.cfg['data']['upsacle'],
                     args=self.cfg)

    # The same image path is passed twice: it serves as both input and
    # target in this self-supervised setting.
    image_path = str(self.cfg['train_dataset']) + '/' + str(name) + '.png'
    self.train_dataset = get_data(self.cfg, image_path, image_path,
                                  self.cfg['data']['upsacle'])
    self.train_loader = DataLoader(self.train_dataset,
                                   self.cfg['data']['batch_size'],
                                   shuffle=False,
                                   num_workers=self.num_workers)
    # Pull the batch contents out of the loader; only lr, hr_ref and
    # file_name are retained on self (index 3 of the tuple is unused,
    # and hr/bic/bic_ref are unpacked but not stored).
    for _, batch in enumerate(self.train_loader, 1):
        lr = Variable(batch[0])
        hr = Variable(batch[1])
        bic = Variable(batch[2])
        hr_ref = Variable(batch[4])
        bic_ref = Variable(batch[5])
        file_name = batch[6]
        self.hr_ref = hr_ref
        self.lr = lr
        self.file_name = file_name

    # Fixed noise input at HR resolution (patch_size * scale), cloned so
    # the initial noise is preserved separately from the working copy.
    hr_size = (self.cfg['data']['patch_size'] * self.cfg['data']['upsacle'],
               self.cfg['data']['patch_size'] * self.cfg['data']['upsacle'])
    self.noise_init = get_noise(32, 'noise', hr_size)
    self.noise = self.noise_init.detach().clone()

    # NOTE(review): 'maek_optimizer' and the 'upsacle' config key are
    # project-wide spellings — kept as-is for compatibility.
    self.optimizer = maek_optimizer(self.cfg['schedule']['optimizer'], cfg,
                                    self.model.parameters())
    self.loss = CycleLoss(scale=1 / 4, loss_type='MSE')

    # Run name encodes algorithm, scale factor and timestamp.
    self.log_name = self.cfg['algorithm'] + '_' + str(
        self.cfg['data']['upsacle']) + '_' + str(self.timestamp)
    # Persist run artifacts under log/<run-name>/.
    self.writer = SummaryWriter('log/' + str(self.log_name))
    save_net_config(self.log_name, self.model)
    save_yml(cfg, os.path.join('log/' + str(self.log_name), 'config.yml'))