Example #1
def main(cfg):
    # Build the Lightning trainer and experiment manager from the Hydra config.
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    # Instantiate WaveGlow and log per-epoch wall-clock time while training.
    model = WaveGlowModel(cfg=cfg.model, trainer=trainer)
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([epoch_time_logger])
    trainer.fit(model)
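On its own this training entry point omits the imports and the Hydra decorator that supply cfg. A minimal sketch of the surrounding boilerplate, assuming NeMo's usual module layout (exact import paths and the config name may differ between releases):

import pytorch_lightning as pl

from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import WaveGlowModel
from nemo.core.config import hydra_runner
from nemo.utils.exp_manager import exp_manager


@hydra_runner(config_path="conf", config_name="waveglow")
def main(cfg):
    # Body as in Example #1 above.
    ...


if __name__ == "__main__":
    main()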
Example #2
    def test_export_to_onnx(self):
        model = WaveGlowModel(wcfg)
        model = model.cuda().half()
        typecheck.set_typecheck_enabled(enabled=False)
        with tempfile.TemporaryDirectory() as tmpdir, model.nemo_infer():
            tmp_file_name = os.path.join(tmpdir, "waveglow.onnx")

            n_mels = 80
            # Test export.
            inp = input_example(n_mels)
            inp1 = taco2wg(*inp)
            inp2 = inp1
            # Run the module twice on identical inputs to confirm the output is deterministic.
            res1 = model.waveglow(*inp1)
            res2 = model.waveglow(*inp2)
            assert torch.allclose(res1, res2, rtol=0.01, atol=0.1)
            # Patch the export-time forward with the inference wrapper before exporting.
            WaveGlowModel.forward_for_export = forward_wrapper
            model.export(
                tmp_file_name,
                verbose=False,
                input_example=inp,
                output_example=res1,
                try_script=False,
                check_trace=False,
                do_constant_folding=True,
            )
Example #3
    def test_export_to_onnx(self):
        model = WaveGlowModel(wcfg)
        model = model.cuda().half()
        typecheck.set_typecheck_enabled(enabled=False)
        with tempfile.TemporaryDirectory() as tmpdir, model.nemo_infer():
            # Generate filename in the temporary directory.
            # TODO: Change `waveglow.ts` to `waveglow.onnx` for > 21.05
            tmp_file_name = os.path.join(tmpdir, "waveglow.ts")

            n_mels = 80
            # Test export.
            inp = input_example(n_mels)
            inp1 = taco2wg(*inp)
            inp2 = inp1
            res1 = model.waveglow(*inp1)
            res2 = model.waveglow(*inp2)
            assert torch.allclose(res1, res2, rtol=0.01, atol=0.1)
            WaveGlowModel.forward_for_export = forward_wrapper
            model.export(
                tmp_file_name,
                verbose=True,
                input_example=inp,
                output_example=res1,
                try_script=False,
                check_trace=False,
                do_constant_folding=True,
                dynamic_axes={"spec": [0], "z": [0], "audio": [0]},
            )
Example #4
    def test_export_to_onnx(self):
        model = WaveGlowModel(wcfg)
        # model = WaveGlowModel.restore_from("../WaveGlow-22050Hz-268M.nemo")
        model = model.cuda().half()
        typecheck.set_typecheck_enabled(enabled=False)
        with tempfile.TemporaryDirectory() as tmpdir, model.nemo_infer():
            # Generate filename in the temporary directory.
            tmp_file_name = os.path.join(tmpdir, "waveglow.onnx")

            n_mels = 80
            # Test export.
            inp = input_example(n_mels)
            inp1 = taco2wg(**inp)
            inp2 = inp1
            res1 = model.waveglow(*inp1)
            res2 = model.waveglow(*inp2)
            assert torch.allclose(res1, res2, rtol=0.01, atol=0.1)

            model.export(
                tmp_file_name,
                verbose=True,
                input_example=inp,
                output_example=res1,
                try_script=False,
                check_trace=False,
                do_constant_folding=True,
                dynamic_axes={
                    "spec": [0],
                    "z": [0],
                    "audio": [0]
                },
                forward_method=forward_wrapper,
            )

            try:
                test_runtime = True
                import onnxruntime
            except (ImportError, ModuleNotFoundError):
                test_runtime = False
            if test_runtime:
                omodel = onnx.load(tmp_file_name)
                output_names = ['audio']
                sess = onnxruntime.InferenceSession(omodel.SerializeToString())
                output = sess.run(None, {
                    "spec": inp["spec"].cpu().numpy(),
                    "z": inp["z"].cpu().numpy()
                })[0]
                assert torch.allclose(torch.from_numpy(output),
                                      res2.cpu(),
                                      rtol=1,
                                      atol=100)
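One caveat for the runtime check: recent onnxruntime releases (the GPU builds from roughly 1.9 onward, if memory serves) require an explicit providers list when creating a session. A hedged variant of the session setup under that assumption:

# Assumption: onnxruntime-gpu >= 1.9, where providers must be passed explicitly.
sess = onnxruntime.InferenceSession(
    omodel.SerializeToString(),
    providers=["CUDAExecutionProvider", "CPUExecutionProvider"],
)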
Example #5
    def test_export_to_onnx(self):
        model = WaveGlowModel(wcfg).cuda().half()
        with tempfile.TemporaryDirectory() as tmpdir, model.nemo_infer():
            # Generate filename in the temporary directory.
            tmp_file_name = os.path.join(tmpdir, "waveglow.onnx")
            # Test export.
            inp = model.waveglow.input_example()
            inp2 = inp
            inp3 = inp2
            res1 = model.waveglow(**inp)
            res2 = model.waveglow(**inp2)
            assert torch.allclose(res1, res2, rtol=0.01, atol=0.1)
            model.export(
                tmp_file_name,
                verbose=True,
                input_example=inp3,
                output_example=res1,
                try_script=False,
                check_trace=False,
            )

            try:
                test_runtime = True
                import onnxruntime
            except (ImportError, ModuleNotFoundError):
                test_runtime = False
            if test_runtime:
                omodel = onnx.load(tmp_file_name)
                output_names = ['audio']
                sess = onnxruntime.InferenceSession(omodel.SerializeToString())
                output = sess.run(None, {
                    "spec": inp["spec"].cpu().numpy(),
                    "z": inp["z"].cpu().numpy()
                })[0]
                assert torch.allclose(torch.from_numpy(output),
                                      res2.cpu(),
                                      rtol=0.01,
                                      atol=0.1)
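All of the export tests above lean on module-level imports and a few local fixtures (wcfg, input_example, taco2wg, forward_wrapper) that the snippets do not show. A rough sketch of such a harness follows; the config path, helper bodies, and tensor shapes are assumptions for illustration, not the exact fixtures from the original test module (the earlier examples appear to use a tuple-returning variant of input_example rather than a dict):

import os
import tempfile

import onnx
import torch
from omegaconf import OmegaConf

from nemo.collections.tts.models import WaveGlowModel
from nemo.core.classes import typecheck

# Hypothetical: model config taken from the example YAML shipped with NeMo.
wcfg = OmegaConf.load("examples/tts/conf/waveglow.yaml").model


def input_example(n_mels=80, frames=96):
    # Hypothetical fixture: random half-precision mel spectrogram plus noise tensor.
    spec = torch.randn(1, n_mels, frames).cuda().half()
    z = torch.randn(1, 8, frames * 32, 1).cuda().half()
    return {"spec": spec, "z": z}


def taco2wg(spec, z):
    # Hypothetical adapter: reshape a Tacotron2-style spectrogram for WaveGlow.
    return spec.permute(0, 2, 1).unsqueeze(3), z


def forward_wrapper(self, spec, z=None):
    # Hypothetical export wrapper that simply delegates to the WaveGlow module.
    return self.waveglow(spec=spec, z=z)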