Example #1

import os

from mxnet import init
from mxnet.gluon import Trainer, data as gdata, loss as gloss

# d2l (Dive into Deep Learning utilities, used here for try_gpu) and the
# project-specific modules providing options_train_executor, PartPrimitive,
# RenderNet, adjust_learning_rate, train, validate and BatchIoU are assumed
# to be imported as well.


def run():
    opt = options_train_executor.parse()

    print('===== arguments: program executor =====')
    for key, val in vars(opt).items():
        print("{:20} {}".format(key, val))
    print('===== arguments: program executor =====')

    # create the folder used for saving checkpoints, if it does not exist yet
    if not os.path.isdir(opt.save_folder):
        os.makedirs(opt.save_folder)


    # build the dataloaders; the training split is assumed to be loaded from
    # opt.train_file, mirroring the validation split below
    train_set = PartPrimitive(opt.train_file)
    train_loader = gdata.DataLoader(
        dataset=train_set,
        batch_size=opt.batch_size,
        shuffle=True,
        num_workers=opt.num_workers,
    )

    # validation data: the same PartPrimitive dataset class, pointed at the validation file
    val_set = PartPrimitive(opt.val_file)
    val_loader = gdata.DataLoader(
        dataset=val_set,
        batch_size=opt.batch_size,
        shuffle=True,
        num_workers=opt.num_workers,
    )

    # build the model
    ctx = d2l.try_gpu()
    model = RenderNet(opt)
    model.initialize(init=init.Xavier(), ctx=ctx)

    # cross-entropy over the channel axis (axis=1), with a global loss weight of 5
    loss = gloss.SoftmaxCrossEntropyLoss(axis=1, weight=5)

    # Adam optimizer driven by the learning rate, weight decay and betas from the options
    optimizer = Trainer(model.collect_params(), "adam",
                        {"learning_rate": opt.learning_rate, "wd": opt.weight_decay,
                         "beta1": opt.beta1, "beta2": opt.beta2})
    # when enabled, resume from previously saved executor parameters and optimizer state
    train_from0 = False
    if train_from0:
        if os.path.exists('./model of executor'):
            model.load_parameters('model of executor')
            print("loaded parameters of model")
        if os.path.exists('./optimizer of executor'):
            optimizer.load_states('optimizer of executor')
            print("loaded state of trainer")
            
    # main training loop: adjust the learning rate, train for one epoch, and
    # periodically checkpoint the model and optimizer state; train_loss / train_iou
    # (and val_loss / val_iou in the commented-out block) are assumed to be metric
    # trackers defined elsewhere in the script
    for epoch in range(1, opt.epochs + 1):
        adjust_learning_rate(epoch, opt, optimizer)
        print("###################")
        print("training")
        train(epoch, train_loader, model, loss, optimizer, opt, ctx, train_loss, train_iou)

        print("###################")
        print("testing")
        '''
        gen_shapes, ori_shapes = validate(epoch, val_loader, model,
                                          loss, opt, ctx, val_loss, val_iou, gen_shape=True)
        gen_shapes = (gen_shapes > 0.5)
        gen_shapes = gen_shapes.astype(np.float32)
        iou = BatchIoU(ori_shapes, gen_shapes)
        print("Mean IoU: {:.3f}".format(iou.mean().asscalar()))
        '''
        # periodic checkpoint of both the optimizer state and the model parameters
        if epoch % opt.save_interval == 0:
            print('Saving...')
            optimizer.save_states("optimizer of executor_3")
            model.save_parameters("model of executor_3")


    # final save after the last epoch
    print('Saving...')
    optimizer.save_states("optimizer of executor_3")
    model.save_parameters("model of executor_3")
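
A minimal sketch of how this executor script could be launched, assuming run() is the intended entry point:

if __name__ == '__main__':
    run()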

Example #2

# Snippet from a separate training script (a Pix2Vox-style pipeline, judging by the
# checkpoint path below); cfg, dt (datetime), nd (mxnet.ndarray), and the networks
# (encoder, decoder, refiner, merger) with their Trainer objects are assumed to be
# defined earlier in that script.
bce_loss = gloss.SigmoidBinaryCrossEntropyLoss()
# initialize training state
init_epoch = 0
best_iou = -1
best_epoch = -1
batch_size = cfg.CONST.BATCH_SIZE
# Load pretrained model if exists
if cfg.TRAIN.RESUME_TRAIN:
    print('[INFO] %s Recovering weights' % dt.now())
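    # the saved checkpoint is assumed to pack [last epoch, best IoU, best epoch]
    # into a single NDArray, which the indexing below unpacks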
    checkpoint = nd.load("check_point")
    init_epoch = checkpoint[0][0].asscalar()
    best_iou = checkpoint[0][1].asscalar()
    best_epoch = checkpoint[0][2].asscalar()
    encoder.load_parameters('encoder_params')
    decoder.load_parameters('decoder_params')
    encoder_trainer.load_states("encoder_trainer")
    decoder_trainer.load_states("decoder_trainer")
    if os.path.exists(
            '/home/hzx/my pix2vox model  with refiner/merger_params'):
        merger.load_parameters("merger_params")
        refiner.load_parameters("refiner_params")
        merger_trainer.load_states("merger_trainer")
        refiner_trainer.load_states("refiner_trainer")
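    # hybridize() switches the Gluon blocks to a cached symbolic graph for faster execution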
    encoder.hybridize()
    decoder.hybridize()
    merger.hybridize()
    print("net has been hybridized")
    print(
        '[INFO] %s Recover complete. Current epoch #%d, Best IoU = %.4f at epoch #%d.'
        % (dt.now(), init_epoch, best_iou, best_epoch))
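
For reference, a minimal sketch of the matching save step, inferred from the recovery logic above (only the file and parameter names loaded there are used; the surrounding training loop is assumed):

checkpoint = nd.array([epoch, best_iou, best_epoch])
nd.save("check_point", [checkpoint])
encoder.save_parameters('encoder_params')
decoder.save_parameters('decoder_params')
encoder_trainer.save_states("encoder_trainer")
decoder_trainer.save_states("decoder_trainer")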