# Common third-party imports assumed by the three listings in this section;
# project-local modules (options_*, the datasets, the models, train/validate,
# BatchIoU, visualization) come from the repository itself.
import os

import numpy as np
import d2l  # d2l.try_gpu(); newer releases use: from d2l import mxnet as d2l
from mxnet import init, nd
from mxnet.gluon import Trainer, data as gdata, loss as gloss


def run():
    # get options
    opt = options_guided_adaptation.parse()
    opt_gen = options_train_generator.parse()
    opt_exe = options_train_executor.parse()

    print('===== arguments: guided adaptation =====')
    for key, val in vars(opt).items():
        print("{:20} {}".format(key, val))
    print('===== arguments: guided adaptation =====')

    if not os.path.isdir(opt.save_folder):
        os.makedirs(opt.save_folder)

    # build loaders
    train_set = ShapeNet3D(opt.train_file)
    train_loader = gdata.DataLoader(dataset=train_set, batch_size=opt.batch_size,
                                    shuffle=True, num_workers=opt.num_workers)
    val_set = ShapeNet3D(opt.val_file)
    val_loader = gdata.DataLoader(dataset=val_set, batch_size=opt.batch_size,
                                  shuffle=False, num_workers=opt.num_workers)

    def visual(path, epoch, gen_shapes, file_name, nums_samples):
        # dump up to nums_samples voxel grids as images for visual inspection
        data = gen_shapes.transpose((0, 3, 2, 1))
        data = np.flip(data, axis=2)
        num_shapes = data.shape[0]
        for i in range(min(nums_samples, num_shapes)):
            voxels = data[i]
            save_name = os.path.join(path, file_name.format(epoch, i))
            visualization(voxels, threshold=0.1, save_name=save_name, uniform_size=0.9)

    ctx = d2l.try_gpu()

    # load program generator
    generator = BlockOuterNet(opt_gen)
    generator.init_blocks(ctx)
    generator.load_parameters("model of blockouternet")

    # load program executor
    executor = RenderNet(opt_exe)
    executor.initialize(init=init.Xavier(), ctx=ctx)
    executor.load_parameters("model of executor")

    # build loss function and optimizer
    criterion = gloss.SoftmaxCrossEntropyLoss(axis=1, from_logits=True)
    optimizer = Trainer(generator.collect_params(), "adam",
                        {"learning_rate": opt.learning_rate,
                         "wd": opt.weight_decay,
                         "beta1": opt.beta1,
                         "beta2": opt.beta2,
                         "clip_gradient": opt.grad_clip})

    # evaluate the pretrained generator before adaptation
    print("###################")
    print("testing")
    gen_shapes, ori_shapes = validate(0, val_loader, generator, opt, ctx, gen_shape=True)
    # visual('imgs of chairs/adaption/chair/', 0, ori_shapes, 'GT {}-{}.png', 8)
    # visual('imgs of chairs/adaption/chair/', 0, gen_shapes, 'epoch{}-{}.png', 8)
    gen_shapes = nd.from_numpy(gen_shapes)
    ori_shapes = nd.from_numpy(ori_shapes)
    IoU = BatchIoU(gen_shapes, ori_shapes)
    print("iou: ", IoU.mean())

    best_iou = 0
    print(opt.epochs)
    for epoch in range(1, opt.epochs + 1):
        print("###################")
        print("adaptation")
        train(epoch, train_loader, generator, executor, criterion, optimizer, opt, ctx)

        print("###################")
        print("testing")
        gen_shapes, ori_shapes = validate(epoch, val_loader, generator, opt, ctx, gen_shape=True)
        # visual('imgs of chairs/adaption/chair/', epoch, gen_shapes, 'epoch{}-{}.png', 8)
        gen_shapes = nd.from_numpy(gen_shapes)
        ori_shapes = nd.from_numpy(ori_shapes)
        IoU = BatchIoU(gen_shapes, ori_shapes)
        print("iou: ", IoU.mean())

        if epoch % opt.save_interval == 0:
            print('Saving...')
            generator.save_parameters("generator of GA on shapenet")
            optimizer.save_states("optimizer of generator of GA on shapenet")

        if IoU.mean() >= best_iou:
            print('Saving best model')
            generator.save_parameters("generator of GA on shapenet")
            optimizer.save_states("optimizer of generator of GA on shapenet")
            best_iou = IoU.mean()
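# The guided-adaptation loop above scores reconstructions with the repo's
# BatchIoU. Purely as an illustration, the helper below computes a per-sample
# voxel IoU over binary occupancy grids of shape (batch, d, h, w) given as
# NumPy arrays; the repository's BatchIoU may differ in thresholding, layout,
# or return type.
def batch_voxel_iou_sketch(pred, target, threshold=0.5):
    """Per-sample intersection-over-union for two batches of voxel grids."""
    pred = np.asarray(pred) > threshold
    target = np.asarray(target) > threshold
    axes = tuple(range(1, pred.ndim))            # reduce over all non-batch axes
    inter = np.logical_and(pred, target).sum(axis=axes)
    union = np.logical_or(pred, target).sum(axis=axes)
    return inter / np.maximum(union, 1)          # guard against empty shapes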
def run():
    opt = options_train_executor.parse()

    print('===== arguments: program executor =====')
    for key, val in vars(opt).items():
        print("{:20} {}".format(key, val))
    print('===== arguments: program executor =====')

    if not os.path.isdir(opt.save_folder):
        os.makedirs(opt.save_folder)

    # build dataloaders (train_set was missing from the original listing;
    # it is assumed to mirror val_set)
    train_set = PartPrimitive(opt.train_file)
    train_loader = gdata.DataLoader(dataset=train_set, batch_size=opt.batch_size,
                                    shuffle=True, num_workers=opt.num_workers)
    val_set = PartPrimitive(opt.val_file)
    val_loader = gdata.DataLoader(dataset=val_set, batch_size=opt.batch_size,
                                  shuffle=True, num_workers=opt.num_workers)

    # build the model
    ctx = d2l.try_gpu()
    model = RenderNet(opt)
    model.initialize(init=init.Xavier(), ctx=ctx)

    loss = gloss.SoftmaxCrossEntropyLoss(axis=1, weight=5)
    optimizer = Trainer(model.collect_params(), "adam",
                        {"learning_rate": opt.learning_rate,
                         "wd": opt.weight_decay,
                         "beta1": opt.beta1,
                         "beta2": opt.beta2})

    # set True to resume from a previously saved checkpoint
    resume = False
    if resume:
        if os.path.exists('./model of executor'):
            model.load_parameters('model of executor')
            print("loaded parameters of model")
        if os.path.exists('./optimizer of executor'):
            optimizer.load_states('optimizer of executor')
            print("loaded states of trainer")

    # history containers passed to train() for logging (assumed to be lists)
    train_loss, train_iou = [], []

    for epoch in range(1, opt.epochs + 1):
        adjust_learning_rate(epoch, opt, optimizer)

        print("###################")
        print("training")
        train(epoch, train_loader, model, loss, optimizer, opt, ctx, train_loss, train_iou)

        print("###################")
        print("testing")
        # gen_shapes, ori_shapes = validate(epoch, val_loader, model, loss, opt, ctx,
        #                                   val_loss, val_iou, gen_shape=True)
        # gen_shapes = (gen_shapes > 0.5).astype(np.float32)
        # iou = BatchIoU(ori_shapes, gen_shapes)
        # print("Mean IoU: {:.3f}".format(iou.mean().asscalar()))

        if epoch % opt.save_interval == 0:
            print('Saving...')
            optimizer.save_states("optimizer of executor_3")
            model.save_parameters("model of executor_3")

    # final checkpoint after training
    print('Saving...')
    optimizer.save_states("optimizer of executor_3")
    model.save_parameters("model of executor_3")
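# The executor loop above calls the repository's adjust_learning_rate(epoch,
# opt, optimizer). Only as a sketch of what such a schedule could look like,
# the helper below applies a step decay; the option names lr_decay_epochs and
# lr_decay_rate are hypothetical and are not taken from the repo.
def adjust_learning_rate_sketch(epoch, opt, trainer):
    """Step decay: shrink the base learning rate every lr_decay_epochs epochs."""
    steps = (epoch - 1) // opt.lr_decay_epochs
    trainer.set_learning_rate(opt.learning_rate * (opt.lr_decay_rate ** steps))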
def run():
    opt = options_train_generator.parse()

    print('===== arguments: program generator =====')
    for key, val in vars(opt).items():
        print("{:20} {}".format(key, val))
    print('===== arguments: program generator =====')

    if not os.path.isdir(opt.save_folder):
        os.makedirs(opt.save_folder)

    # build model
    ctx = d2l.try_gpu()
    model = BlockOuterNet(opt)
    model.init_blocks(ctx)

    # build criteria (initialize() returns None, so no assignment is needed)
    crit_cls = LSTMClassCriterion()
    crit_reg = LSTMRegressCriterion()
    crit_cls.initialize(ctx=ctx)
    crit_reg.initialize(ctx=ctx)

    optimizer = Trainer(model.collect_params(), "adam",
                        {"learning_rate": opt.learning_rate,
                         "wd": opt.weight_decay,
                         "beta1": opt.beta1,
                         "beta2": opt.beta2,
                         "clip_gradient": opt.grad_clip})

    # build dataloaders
    train_set = Synthesis3D(opt.train_file, n_block=opt.outer_seq_length)
    train_loader = gdata.DataLoader(dataset=train_set, batch_size=opt.batch_size,
                                    shuffle=True, num_workers=opt.num_workers)
    val_set = Synthesis3D(opt.val_file, n_block=opt.outer_seq_length)
    val_loader = gdata.DataLoader(dataset=val_set, batch_size=opt.batch_size,
                                  shuffle=True, num_workers=opt.num_workers)

    for epoch in range(1, opt.epochs + 1):
        print("###################")
        print("training")
        train(epoch, train_loader, model, crit_cls, crit_reg, optimizer, opt, ctx)

        print("###################")
        print("testing")
        validate(epoch, val_loader, model, crit_cls, crit_reg, opt, ctx, True)

        # always true: save a checkpoint every epoch
        if epoch % 1 == 0:
            print('Saving...')
            optimizer.save_states("optimizer of PG")
            model.save_parameters("model of blockouternet")

    # final checkpoint after training
    optimizer.save_states("optimizer of PG")
    model.save_parameters("model of blockouternet")
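# Assumed entry point (not part of the original listings): each script exposes a
# single run(). The generator and executor are trained first, producing
# "model of blockouternet" and "model of executor", which the guided-adaptation
# script at the top of this section then loads before fine-tuning on ShapeNet.
if __name__ == "__main__":
    run()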