Example #1
# dir_base, num_train_file and b_16k are defined in the part of the script
# omitted from this excerpt; pap and dap below are the project's file-path
# and data-provider helper modules, whose imports are also omitted.
num_valid_file = 399

# the 16 kHz configuration uses a smaller acoustic (cmp) feature vector
if b_16k:
    n_ins = 87
    n_outs = 187
else:
    n_ins = 87
    n_outs = 193
dir_lab_norm = 'nn_no_silence_lab_norm_' + str(n_ins)
dir_cmp_norm = 'nn_norm_mgc_lf0_vuv_bap_' + str(n_outs)
file_names = 'file_id_list_full.scp'

# prepare the training data
file_list = pap.read_file_list(dir_base + file_names)
cmp_norm_file_list = pap.prepare_file_path_list(file_list,
                                                dir_base + dir_cmp_norm,
                                                '.cmp')
lab_norm_file_list = pap.prepare_file_path_list(file_list,
                                                dir_base + dir_lab_norm,
                                                '.lab')

train_x_file_list = lab_norm_file_list[0:num_train_file]
train_y_file_list = cmp_norm_file_list[0:num_train_file]
valid_x_file_list = lab_norm_file_list[num_train_file:num_train_file +
                                       num_valid_file]
valid_y_file_list = cmp_norm_file_list[num_train_file:num_train_file +
                                       num_valid_file]

# the excerpt is cut off inside this call; n_outs is added here to close it,
# but any further keyword arguments in the original are not shown
train_data_reader = dap.ListDataProvider(x_file_list=train_x_file_list,
                                         y_file_list=train_y_file_list,
                                         n_ins=n_ins,
                                         n_outs=n_outs)
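
Example #1 relies on two path helpers, pap.read_file_list and pap.prepare_file_path_list, whose definitions are not included in the excerpt. The following is a minimal sketch of what such helpers could look like, assuming the .scp file lists one utterance id per line; only the call signatures are taken from the code above, the bodies are an assumption.

import os

def read_file_list(scp_path):
    # assumption: the .scp file holds one utterance id per non-empty line
    with open(scp_path) as f:
        return [line.strip() for line in f if line.strip()]

def prepare_file_path_list(file_ids, file_dir, file_extension):
    # assumption: build one full path per utterance id
    return [os.path.join(file_dir, file_id + file_extension)
            for file_id in file_ids]
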
Example #2
import os

import numpy as np
import mxnet as mx
from mxnet import nd

# the start of this excerpt is truncated; as in Example #1, n_ins and n_outs
# are selected by the b_16k flag, and the value shown here belongs to the
# non-16 kHz branch. dir_base and the pap helper module are defined in the
# omitted part.
n_ins = 87
n_outs = 193
dir_lab_norm = 'nn_no_silence_lab_norm_' + str(n_ins)
file_names = 'test_id_list.scp'

# checkpoint prefix and epoch to load, plus the output directory for the
# generated .cmp files
saved_epoch = 1
saved_ckp_model = dir_base + 'models/mxnet_bigru_sym_'
output_dir = dir_base + 'synthesis'
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

sym, arg_params, aux_params = mx.model.load_checkpoint(saved_ckp_model,
                                                       saved_epoch)

file_list = pap.read_file_list(dir_base + file_names)
lab_norm_file_list = pap.prepare_file_path_list(file_list,
                                                dir_base + dir_lab_norm,
                                                '.lab')
generate_file_list = pap.prepare_file_path_list(file_list, output_dir, '.cmp')

# generate one .cmp acoustic feature file per test utterance
num_file = len(lab_norm_file_list)
for i in range(num_file):
    # read normalised label features for this utterance
    features = np.fromfile(lab_norm_file_list[i], dtype=np.float32)
    # keep a whole number of frames (integer division; '/' would produce a
    # float slice index under Python 3)
    features = features[:(n_ins * (features.size // n_ins))]
    input_labels = features.reshape((-1, n_ins))
    # bind a module to this utterance's shape and run a single forward pass
    mod = mx.mod.Module(symbol=sym, context=mx.gpu(), label_names=[])
    mod.bind(data_shapes=[('data', input_labels.shape)], for_training=False)
    mod.set_params(arg_params, aux_params, allow_missing=True)
    mod.forward(mx.io.DataBatch([nd.array(input_labels)]))
    # the original excerpt ends here; presumably the prediction is then written
    # to the matching path in generate_file_list, e.g.:
    mod.get_outputs()[0].asnumpy().astype(np.float32).tofile(generate_file_list[i])
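
Rebinding a fresh Module for every utterance works but is wasteful; the same checkpoint can be bound once and merely reshaped per file. The loop below is a sketch of that variant under the same assumptions as above (same sym, arg_params, aux_params, file lists and feature layout); it is a possible refinement, not part of the original example.

mod = mx.mod.Module(symbol=sym, context=mx.gpu(), label_names=[])
mod.bind(data_shapes=[('data', (1, n_ins))], for_training=False)
mod.set_params(arg_params, aux_params, allow_missing=True)

for lab_path, cmp_path in zip(lab_norm_file_list, generate_file_list):
    features = np.fromfile(lab_path, dtype=np.float32)
    input_labels = features[:n_ins * (features.size // n_ins)].reshape((-1, n_ins))
    # adjust the bound shapes to this utterance instead of creating a new Module
    mod.reshape(data_shapes=[('data', input_labels.shape)])
    mod.forward(mx.io.DataBatch([nd.array(input_labels)]))
    mod.get_outputs()[0].asnumpy().astype(np.float32).tofile(cmp_path)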