Ejemplo n.º 1
0
    ]

torch.set_grad_enabled(False)  # inference only: no autograd bookkeeping needed

# Restore the Text2Mel network from the newest dataset-specific checkpoint.
text2mel = Text2Mel(vocab).eval()
last_checkpoint_file_name = get_last_checkpoint_file_name(
    os.path.join(hp.logdir, '%s-text2mel' % args.dataset))
# last_checkpoint_file_name = 'logdir/%s-text2mel/step-020K.pth' % args.dataset
if last_checkpoint_file_name:
    print("loading text2mel checkpoint '%s'..." % last_checkpoint_file_name)
    load_checkpoint(last_checkpoint_file_name, text2mel, None)
else:
    # Fixed typo in the user-facing message (was "text2mel not exits").
    print("text2mel checkpoint does not exist")
    sys.exit(1)

# Restore the SSRN (spectrogram super-resolution) network likewise.
ssrn = SSRN().eval()
last_checkpoint_file_name = get_last_checkpoint_file_name(
    os.path.join(hp.logdir, '%s-ssrn' % args.dataset))
# last_checkpoint_file_name = 'logdir/%s-ssrn/step-005K.pth' % args.dataset
if last_checkpoint_file_name:
    print("loading ssrn checkpoint '%s'..." % last_checkpoint_file_name)
    load_checkpoint(last_checkpoint_file_name, ssrn, None)
else:
    # Fixed typo in the user-facing message (was "ssrn not exits").
    print("ssrn checkpoint does not exist")
    sys.exit(1)

# Synthesize one sentence at a time: batch processing has a known bug
# (original note: "synthetize by one by one because there is a batch
# processing bug!").
# NOTE(review): this loop body looks truncated in this excerpt; the
# commented-out strings below appear to be leftover sample sentences.
for i in range(len(SENTENCES)):
    sentences = [SENTENCES[i]]

    # Length of the current sentence — presumably the max input length for
    # this single-sentence batch; confirm against the (unseen) synthesis call.
    max_N = len(SENTENCES[i])
    # "A pot of tea helps to pass the evening.",
    # "Smoky fires lack flame and heat.",
    # "The soft cushion broke the man's fall. But you can't be serious, can you? Right, This is the sentence. So thank me, please.",
    # "The salt breeze came across from the sea.",
    # "The girl at the booth sold fifty bonds."
]

torch.set_grad_enabled(False)  # inference only: no gradients needed

# Collect every Text2Mel checkpoint saved for this run. The pattern is a
# plain literal — the former f-string prefix had no placeholders (ruff F541).
t2m_list = glob.glob('logdir/LJ-lj_fixed.csv-256-0.005-64-text2mel/step-*.pth')
# t2m_list = ['logdir/Keira-Keira_all.csv-512-0.005-16-text2mel/step-010000.pth']
# t2m_list = ['logdir/Geralt-Geralt_s5_no_a.csv-256-0.005-32-text2mel/step-093500.pth']
#             'logdir/Geralt-512-0.005-24-text2mel/step-073500 (copy).pth',
#             'logdir/Geralt-512-0.005-24-text2mel/step-063000 (copy).pth']

# Restore the SSRN network from a fixed pretrained LJSpeech checkpoint.
ssrn = SSRN().to(device).eval()
print("loading ssrn...")
load_checkpoint('trained/ssrn/lj/step-140K.pth', ssrn, None)
# last_checkpoint_file_name = get_last_checkpoint_file_name(os.path.join(hp.logdir, '%s-ssrn' % args.dataset))
# last_checkpoint_file_name = 'logdir/%s-ssrn/step-005K.pth' % args.dataset
# if last_checkpoint_file_name:
#     print("loading ssrn checkpoint '%s'..." % last_checkpoint_file_name)
#     load_checkpoint(last_checkpoint_file_name, ssrn, None)
# else:
#     print("ssrn not exits")
#     sys.exit(1)

# Ensure the output directory exists. makedirs(exist_ok=True) is idempotent
# and race-free, unlike the former isdir()+mkdir() pair; the former f-string
# literals had no placeholders, so plain strings suffice.
os.makedirs('samples', exist_ok=True)

# Run synthesis once per Text2Mel checkpoint collected in t2m_list.
# NOTE(review): the loop body appears truncated in this excerpt; only the
# checkpoint base name is computed here.
for t2m in t2m_list:
    # Checkpoint file name without directory or extension, e.g. "step-010000".
    filename = os.path.splitext(os.path.basename(t2m))[0]
Ejemplo n.º 3
0
# SSRN training setup: data loaders, model, optimizer, logging, and resume.
use_gpu = torch.cuda.is_available()
print('use_gpu', use_gpu)
if use_gpu:
    # Let cuDNN autotune conv kernels; fine here because input shapes repeat.
    torch.backends.cudnn.benchmark = True

# Train and validation loaders use the same features and batch size.
train_data_loader = SSRNDataLoader(ssrn_dataset=SpeechDataset(['mags',
                                                               'mels']),
                                   batch_size=24,
                                   mode='train')
valid_data_loader = SSRNDataLoader(ssrn_dataset=SpeechDataset(['mags',
                                                               'mels']),
                                   batch_size=24,
                                   mode='valid')

# Move the model to GPU only when one is available: the original called
# .cuda() unconditionally, which crashes on CPU-only machines even though
# use_gpu was already computed above.
ssrn = SSRN()
if use_gpu:
    ssrn = ssrn.cuda()

optimizer = torch.optim.Adam(ssrn.parameters(), lr=hp.ssrn_lr)

start_timestamp = int(time.time() * 1000)  # epoch milliseconds, for run bookkeeping
start_epoch = 0
global_step = 0

logger = Logger(args.dataset, 'ssrn')

# Resume from the latest checkpoint in the log directory, if one exists.
last_checkpoint_file_name = get_last_checkpoint_file_name(logger.logdir)
if last_checkpoint_file_name:
    print("loading the last checkpoint: %s" % last_checkpoint_file_name)
    start_epoch, global_step = load_checkpoint(last_checkpoint_file_name, ssrn,
                                               optimizer)
Ejemplo n.º 4
0
torch.set_grad_enabled(False)  # inference only: no autograd bookkeeping needed

# Restore the Text2Mel network on the GPU from the newest dataset checkpoint.
text2mel = Text2Mel(vocab).eval()
text2mel = text2mel.cuda()
last_checkpoint_file_name = get_last_checkpoint_file_name(
    os.path.join(hp.logdir, '%s-text2mel' % args.dataset))
# last_checkpoint_file_name = 'logdir/%s-text2mel/step-020K.pth' % args.dataset
if last_checkpoint_file_name:
    print("loading text2mel checkpoint '%s'..." % last_checkpoint_file_name)
    load_checkpoint(last_checkpoint_file_name, text2mel, None)
else:
    # Fixed typo in the user-facing message (was "text2mel not exits").
    print("text2mel checkpoint does not exist")
    sys.exit(1)

# Restore the SSRN network on the GPU likewise.
ssrn = SSRN().eval()
ssrn = ssrn.cuda()
last_checkpoint_file_name = get_last_checkpoint_file_name(
    os.path.join(hp.logdir, '%s-ssrn' % args.dataset))
# last_checkpoint_file_name = 'logdir/%s-ssrn/step-005K.pth' % args.dataset
if last_checkpoint_file_name:
    print("loading ssrn checkpoint '%s'..." % last_checkpoint_file_name)
    load_checkpoint(last_checkpoint_file_name, ssrn, None)
else:
    # Fixed typo in the user-facing message (was "ssrn not exits").
    print("ssrn checkpoint does not exist")
    sys.exit(1)

# Synthesize one sentence at a time: batch processing has a known bug
# (original note: "synthetize by one by one because there is a batch
# processing bug!").
for i in range(len(SENTENCES)):

    # Entries appear to be pipe-delimited ("text|..."); keep only the text
    # before the first '|'. NOTE(review): loop body is truncated in this excerpt.
    sentence = SENTENCES[i].split("|")[0]