Example #1

    # model.set_r(hp.tts_r)

    optimiser = optim.Adam(model.parameters())

    current_step = model.get_step()

    if not force_gta:

        # Work through the training schedule: each session trains at a fixed
        # reduction factor r, learning rate and batch size up to max_step
        for session in hp.tts_schedule:

            # Re-read the step counter so each session only trains up to its
            # own max_step (the counter advances as earlier sessions finish)
            current_step = model.get_step()

            r, lr, max_step, batch_size = session

            # Skip any session the model has already trained past
            if current_step < max_step:

                train_set, attn_example = get_tts_dataset(
                    paths.data, batch_size, r)

                # r is the number of mel frames generated per decoder step
                model.set_r(r)

                training_steps = max_step - current_step

                simple_table([(f'Steps with r={r}',
                               str(training_steps // 1000) + 'k Steps'),
                              ('Batch Size', batch_size),
                              ('Learning Rate', lr),
                              ('Outputs/Step (r)', model.get_r())])

                tts_train_loop(model, optimiser, train_set, lr, training_steps,
                               attn_example)

        print('Training Complete.')
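
For reference, the schedule unpacked above is simply a list of (r, lr, max_step, batch_size) sessions. Below is a minimal sketch of what such an hp.tts_schedule might look like; the exact values are illustrative assumptions, not taken from this project.

# Illustrative only: each session is (reduction factor r, learning rate,
# step count to train up to, batch size) -- values here are assumptions
tts_schedule = [(7, 1e-3,  10_000, 32),
                (5, 1e-4, 100_000, 32),
                (2, 1e-4, 180_000, 16),
                (2, 1e-4, 350_000,  8)]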
Example #2

    # Build the Tacotron model from the hyperparameters and move it to the GPU
    model = Tacotron(embed_dims=hp.tts_embed_dims,
                     num_chars=len(symbols),
                     encoder_dims=hp.tts_encoder_dims,
                     decoder_dims=hp.tts_decoder_dims,
                     n_mels=hp.num_mels,
                     fft_bins=hp.num_mels,
                     postnet_dims=hp.tts_postnet_dims,
                     encoder_K=hp.tts_encoder_K,
                     lstm_dims=hp.tts_lstm_dims,
                     postnet_K=hp.tts_postnet_K,
                     num_highways=hp.tts_num_highways,
                     dropout=hp.tts_dropout).cuda()

    paths = Paths(hp.data_path, hp.voc_model_id, hp.tts_model_id)

    # Restore the latest saved weights for this model ID
    model.restore(paths.tts_latest_weights)

    optimiser = optim.Adam(model.parameters())

    train_set = get_tts_dataset(paths.data, batch_size)

    if not force_gta:

        # force_train lifts the cap by training towards a very large step count
        total_steps = 10_000_000 if force_train else hp.tts_total_steps

        simple_table([('Remaining',
                       str((total_steps - model.get_step()) // 1000) + 'k Steps'),
                      ('Batch Size', batch_size),
                      ('Learning Rate', lr)])

        tts_train_loop(model, optimiser, train_set, lr, total_steps)

        print('Training Complete.')
        print(