Example #1
import argparse

# hp, train, and remove_all_files are assumed to be provided by the
# surrounding project (hyper-parameters, training entry point, and a
# log-directory cleanup helper).


def get_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument('case1',
                        type=str,
                        help='experiment case name of train1')
    parser.add_argument('case2',
                        type=str,
                        help='experiment case name of train2')
    parser.add_argument('-ckpt', help='checkpoint to load model.')
    parser.add_argument('-gpu', help='comma separated list of GPU(s) to use.')
    parser.add_argument('-r',
                        action='store_true',
                        help='start training from the beginning.')
    arguments = parser.parse_args()
    return arguments


if __name__ == '__main__':
    args = get_arguments()
    print(args.case2)
    hp.set_hparam_yaml(args.case2,
                       default_file='hparams/{}.yaml'.format(args.case2))
    logdir_train1 = '{}/{}/train1'.format(hp.logdir_path, args.case1)
    logdir_train2 = '{}/{}/train2'.format(hp.logdir_path, args.case2)

    if args.r:
        remove_all_files(logdir_train2)

    print('case1: {}, case2: {}, logdir1: {}, logdir2: {}'.format(
        args.case1, args.case2, logdir_train1, logdir_train2))

    train(args, logdir1=logdir_train1, logdir2=logdir_train2)

    print("Done")
Example #2
    # ...tail of matching_list(wav_files, npz_files), truncated in this
    # example; it returns the WAV files that do not yet have an npz version.
    return not_converted


import datetime
import glob

# hp and generate_npz are assumed to be provided by the surrounding project.


def preprocessing(dataset_path, is_converting=False):

    s = datetime.datetime.now()

    wav_files = glob.glob(dataset_path)
    dataset_path = dataset_path.replace('WAV', 'npz')
    npz_files = glob.glob(dataset_path)

    if not npz_files:
        generate_npz(wav_files)
    else:
        convert_list = matching_list(wav_files, npz_files)

        if not convert_list:
            print('All WAV files in dataset directory are already converted!')
        else:
            generate_npz(convert_list)

    e = datetime.datetime.now()
    diff = e - s
    print("Done. elapsed time:{}s".format(diff.seconds))


if __name__ == '__main__':
    hp.set_hparam_yaml("TIMIT2")
    data_path = "/home/cocoonmola/datasets/TIMIT2/TRAIN/*/*/*.WAV"
    preprocessing(data_path)
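
matching_list is only visible above through its trailing return not_converted. A minimal sketch of the comparison it appears to perform, assuming it returns the WAV paths whose converted npz counterpart does not exist yet (this mirrors the WAV-to-npz path substitution used in preprocessing and is an inference from the call site, not the project's actual implementation):

def matching_list(wav_files, npz_files):
    # Paths of the npz files that already exist.
    existing = set(npz_files)
    # Keep only the WAV files whose npz counterpart is missing.
    not_converted = [w for w in wav_files
                     if w.replace('WAV', 'npz') not in existing]
    return not_converted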
Example #3
import argparse

# hp, train, and remove_all_files are assumed to be provided by the
# surrounding project, as in Example #1.


def get_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument('case1',
                        type=str,
                        help='experiment case name of train1')
    parser.add_argument('case2',
                        type=str,
                        help='experiment case name of train2')
    parser.add_argument('-ckpt', help='checkpoint to load model.')
    parser.add_argument('-gpu', help='comma separated list of GPU(s) to use.')
    parser.add_argument('-r',
                        action='store_true',
                        help='start training from the beginning.')
    arguments = parser.parse_args()
    return arguments


if __name__ == '__main__':
    args = get_arguments()
    hp.set_hparam_yaml(args.case2)
    logdir_train1 = '{}/{}/train1'.format(hp.logdir_path, args.case1)
    logdir_train2 = '{}/{}/train2'.format(hp.logdir_path, args.case2)

    if args.r:
        remove_all_files(logdir_train2)

    print('case1: {}, case2: {}, logdir1: {}, logdir2: {}'.format(
        args.case1, args.case2, logdir_train1, logdir_train2))
    print('dataset : {}'.format(hp.train2.data_path))

    train(args, logdir1=logdir_train1, logdir2=logdir_train2)

    print("Done")
Example #4
import argparse
import datetime
import os

import numpy as np
import tensorflow as tf
from tensorpack.predict.base import OfflinePredictor
from tensorpack.predict.config import PredictConfig
from tensorpack.tfutils.sessinit import ChainInit, SaverRestore

# Net2, queue_input, get_eval_input_names, get_eval_output_names, and hp are
# assumed to be provided by the surrounding project.


def ckpt2mel(predictor, ppgs_dir, mel_dir, save_dir):
    print("get into ckpt")
    for fi in os.listdir(ppgs_dir):
        print("fi",fi)
        #ppgs_name = os.path.join(ppgs_dir, fi)

        mel, ppgs = queue_input(fi, ppgs_dir, mel_dir)
        pred_mel = predictor(mel, ppgs)
        #print("pred_mel",pred_mel.size())
        pred_mel = np.array(pred_mel)
        print("pred_mel", pred_mel.shape)
        # Keep the last two axes of the 4-D prediction (frames x feature bins).
        length = pred_mel.shape[2]
        width = pred_mel.shape[3]
        pred_mel = pred_mel.reshape((length, width))
        save_name = fi.split('.npy')[0]
        if hp.default.n_mels == 20:
            npy_dir = os.path.join(save_dir, 'lpc20')
            if not os.path.exists(npy_dir):
                os.makedirs(npy_dir)
            npy_path = os.path.join(npy_dir, '%s_20.npy' % save_name)
            np.save(npy_path, pred_mel)
            print('saved', npy_dir)
        elif hp.default.n_mels == 32:
            npy_dir = os.path.join(save_dir, 'lpc32')
            if not os.path.exists(npy_dir):
                os.makedirs(npy_dir)
            npy_path = os.path.join(npy_dir, '%s_32.npy' % save_name)
            np.save(npy_path, pred_mel)
            print('saved', npy_dir)


def do_convert(args, logdir2):
    # Load graph
    model = Net2()
    index = 0
    ppgs_dir = hp.convert.ppgs_path
    mel_dir = hp.convert.mel_path
    #for fi in os.listdir(ppgs_dir):
    #print("fi",fi)
    #ppgs_path = os.path.join(ppgs_dir, fi)
    #df = Net2DataFlow(hp.convert.mel_path, ppgs_path, hp.convert.batch_size)
    #print("finish df")
    ckpt2 = '{}/{}'.format(logdir2, args.ckpt) if args.ckpt else tf.train.latest_checkpoint(logdir2)
    print("ckpt2",ckpt2)
    session_inits = []
    if ckpt2:
        session_inits.append(SaverRestore(ckpt2))
    pred_conf = PredictConfig(model=model,
                              input_names=get_eval_input_names(),
                              output_names=get_eval_output_names(),
                              session_init=ChainInit(session_inits))
    predictor = OfflinePredictor(pred_conf)
    print("after predictor")
    #import pdb;pdb.set_trace()
    ckpt2mel(predictor, ppgs_dir, mel_dir, hp.convert.save_path)
    print("success")
    def get_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument('case2', type=str, help='experiment case name of train2')
    parser.add_argument('-ckpt', help='checkpoint to load model.')
    arguments = parser.parse_args()
    return arguments


if __name__ == '__main__':
    args = get_arguments()
    hp.set_hparam_yaml(args.case2)
    logdir_train2 = '{}/{}/train2'.format(hp.logdir_path, args.case2)

    print('case2: {}, logdir2: {}'.format(args.case2, logdir_train2))

    s = datetime.datetime.now()

    do_convert(args, logdir2=logdir_train2)

    e = datetime.datetime.now()
    diff = e - s
    print("Done. elapsed time:{}s".format(diff.seconds))