config = create_from_json(arguments.config_json_path)
arguments.output.mkdir(exist_ok=True)
config.save_as_json((arguments.output / 'config.json').absolute())

# model
if config.train.gpu >= 0:
    cuda.get_device_from_id(config.train.gpu).use()
predictor, discriminator = create(config.model)
models = {
    'predictor': predictor,
    'discriminator': discriminator,
}

# dataset
dataset = create_dataset(config.dataset)
train_iter = MultiprocessIterator(dataset['train'], config.train.batchsize)
test_iter = MultiprocessIterator(dataset['test'], config.train.batchsize, repeat=False, shuffle=False)
train_eval_iter = MultiprocessIterator(dataset['train_eval'], config.train.batchsize, repeat=False, shuffle=False)

# optimizer
def create_optimizer(model):
    optimizer = optimizers.Adam(alpha=0.0002, beta1=0.5, beta2=0.999)
    optimizer.setup(model)
    return optimizer

opts = {key: create_optimizer(model) for key, model in models.items()}

# updater