                        1E-3)
        assert_allclose(nsp_score.asnumpy(), nsp_score_tn.asnumpy(), 1E-3,
                        1E-3)
        assert_allclose(mlm_score.asnumpy(), mlm_score_tn.asnumpy(), 1E-3,
                        1E-3)

        # Test for fp16. The check is effectively disabled for now: on GPU the
        # test is skipped before verify_backbone_fp16() runs, because MobileBERT
        # produces NaN values in FP16 mode.
        if ctx.device_type == 'gpu':
            pytest.skip('MobileBERT will have nan values in FP16 mode.')
            verify_backbone_fp16(model_cls=MobileBertModel,
                                 cfg=cfg,
                                 ctx=ctx,
                                 inputs=[inputs, token_types, valid_length])


def test_list_pretrained_mobilebert():
    assert len(list_pretrained_mobilebert()) > 0


@pytest.mark.remote_required
@pytest.mark.parametrize('model_name', list_pretrained_mobilebert())
def test_mobilebert_get_pretrained(model_name):
    with tempfile.TemporaryDirectory() as root:
        cfg, tokenizer, backbone_params_path, mlm_params_path =\
            get_pretrained_mobilebert(model_name, load_backbone=True, load_mlm=True, root=root)
        assert cfg.MODEL.vocab_size == len(tokenizer.vocab)
        mobilebert_model = MobileBertModel.from_cfg(cfg)
        mobilebert_model.load_parameters(backbone_params_path)
        # Load the full pretraining weights when they are available.
        mobilebert_pretrain_model = MobileBertForPretrain(cfg)
        if mlm_params_path is not None:
            mobilebert_pretrain_model.load_parameters(mlm_params_path)
        # Also verify that a freshly constructed pretraining model can load
        # only the backbone weights.
        mobilebert_pretrain_model = MobileBertForPretrain(cfg)
        mobilebert_pretrain_model.backbone_model.load_parameters(
            backbone_params_path)
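

# Illustrative sketch, not part of the original suite: a minimal end-to-end
# forward pass with the downloaded backbone weights. It assumes the imports at
# the top of this file (pytest, tempfile, mxnet as mx) and that
# MobileBertModel's forward pass takes (inputs, token_types, valid_length) and
# returns (contextual_embedding, pooled_out); only batch/sequence dimensions
# are sanity-checked so that no config-specific sizes are assumed.
@pytest.mark.remote_required
@pytest.mark.parametrize('model_name', list_pretrained_mobilebert())
def test_mobilebert_pretrained_inference_sketch(model_name):
    with tempfile.TemporaryDirectory() as root:
        cfg, tokenizer, backbone_params_path, _ = \
            get_pretrained_mobilebert(model_name, load_backbone=True,
                                      load_mlm=False, root=root)
        model = MobileBertModel.from_cfg(cfg)
        model.load_parameters(backbone_params_path)
        # Tokenize a short sentence into word-piece ids.
        token_ids = tokenizer.encode('Hello, MobileBERT!', int)
        inputs = mx.np.array([token_ids], dtype='int32')
        token_types = mx.np.zeros_like(inputs)
        valid_length = mx.np.array([len(token_ids)], dtype='int32')
        contextual_embedding, pooled_out = model(inputs, token_types,
                                                 valid_length)
        assert contextual_embedding.shape[:2] == (1, len(token_ids))
        assert pooled_out.shape[0] == 1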