# Load training statistics (as saved by the MCP training script below)
with h5py.File('models/mcp_train_stats.h5', 'r') as train_stats:
    src_train_mean = train_stats['src_train_mean'][:]
    src_train_std = train_stats['src_train_std'][:]
    trg_train_mean = train_stats['trg_train_mean'][:]
    trg_train_std = train_stats['trg_train_std'][:]

# Load test data
print('Loading test data...', end='')
with h5py.File('data/test_datatable.h5', 'r') as test_datatable:
    test_data = test_datatable['test_data'][:, :]

src_test_data = test_data[:, 0:40]  # Source data
src_test_data = utils.reshape_lstm(src_test_data, tsteps, data_dim)
src_test_data = (src_test_data - src_train_mean) / src_train_std

trg_test_data = test_data[:, 43:83]  # Target data
print('done')

################
# Predict data #
################
print('Predicting')
prediction_test = model.predict(src_test_data, batch_size=batch_size)
prediction_test = prediction_test.reshape(-1, data_dim)

# De-normalize predicted output
prediction_test = (prediction_test * trg_train_std) + trg_train_mean
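# prediction_test is now on the same (de-normalized) scale as trg_test_data,
# so the two can be compared directly.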

#############
# Example 2 #
#############
# Remove means and normalize
src_train_mean = np.mean(src_train_data, axis=0)
src_train_std = np.std(src_train_data, axis=0)

src_train_data = (src_train_data - src_train_mean) / src_train_std
src_valid_data = (src_valid_data - src_train_mean) / src_train_std
src_test_data = (src_test_data - src_train_mean) / src_train_std

trg_train_mean = np.mean(trg_train_data, axis=0)
trg_train_std = np.std(trg_train_data, axis=0)

trg_train_data = (trg_train_data - trg_train_mean) / trg_train_std
trg_valid_data = (trg_valid_data - trg_train_mean) / trg_train_std

# Zero-pad and reshape data
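# (utils.reshape_lstm is assumed to zero-pad each parameter sequence to a
# multiple of tsteps and reshape it into (n_sequences, tsteps, data_dim),
# the 3-D input layout Keras LSTM layers expect.)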
src_train_data = utils.reshape_lstm(src_train_data, tsteps, data_dim)
src_valid_data = utils.reshape_lstm(src_valid_data, tsteps, data_dim)
src_test_data = utils.reshape_lstm(src_test_data, tsteps, data_dim)

trg_train_data = utils.reshape_lstm(trg_train_data, tsteps, data_dim)
trg_valid_data = utils.reshape_lstm(trg_valid_data, tsteps, data_dim)

# Save training statistics
with h5py.File('models/mcp_train_stats.h5', 'w') as f:
    f.create_dataset("src_train_mean", data=src_train_mean)
    f.create_dataset("src_train_std", data=src_train_std)
    f.create_dataset("trg_train_mean", data=trg_train_mean)
    f.create_dataset("trg_train_std", data=trg_train_std)

#############
# Example 4 #
#############
    mcp_params = parse_file(40,
                            'data/test/vocoded/SF1/' + basename + '.mcp.dat')
    lf0_params = parse_file(1,
                            'data/test/vocoded/SF1/' + basename + '.lf0.i.dat')
    mvf_params = parse_file(1,
                            'data/test/vocoded/SF1/' + basename + '.vf.i.dat')
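    # parse_file(dim, path) is assumed to return an (n_frames, dim) array read
    # from the vocoder output: 40 MCP coefficients per frame, plus one
    # interpolated lf0 value and one maximum voiced frequency value per frame.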

    # Compute U/V flags
    assert mvf_params.shape == lf0_params.shape
    uv_flags = np.empty(mvf_params.shape)
    for index, vf in enumerate(mvf_params):
        uv_flags[index] = 1 - utils.kronecker_delta(vf)
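    # Assuming utils.kronecker_delta(x) is 1 when x == 0 and 0 otherwise, the
    # loop marks a frame as voiced (flag 1) whenever its maximum voiced
    # frequency is non-zero, i.e. uv_flags = np.where(mvf_params == 0, 0., 1.).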

    # Prepare data for prediction
    mcp_params = (mcp_params - src_mcp_mean) / src_mcp_std
    mcp_params = utils.reshape_lstm(mcp_params, mcp_tsteps, mcp_data_dim)

    lf0_params = (lf0_params - src_lf0_mean) / src_lf0_std
    lf0_params = utils.reshape_lstm(np.column_stack((lf0_params, uv_flags)),
                                    lf0_tsteps, lf0_data_dim)

    mvf_params = (mvf_params - src_mvf_mean) / src_mvf_std
    mvf_params = utils.apply_context(mvf_params, context_size)  # Apply context
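    # (utils.apply_context is assumed to stack each frame with its context_size
    # neighbouring frames on either side, so the MVF model sees a short temporal
    # window rather than a single frame.)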

    ######################
    # Predict parameters #
    ######################
    mvf_prediction = mvf_model.predict(np.column_stack((mvf_params, uv_flags)))
    mvf_prediction[:, 0] = (mvf_prediction[:, 0] * trg_mvf_std) + trg_mvf_mean

    lf0_prediction = lf0_model.predict(lf0_params, batch_size=lf0_batch_size)

#############
# Example 5 #
#############
    trg_train_mean = train_stats['trg_train_mean'][()]
    trg_train_std = train_stats['trg_train_std'][()]

# Load test data
print('Loading test data...', end='')
with h5py.File('data/test_datatable.h5', 'r') as test_datatable:
    test_data = test_datatable['test_data'][:, :]

src_test_frames = np.column_stack((test_data[:, 40], test_data[:, 42]))
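# Column 40 of the test datatable is assumed to hold the source frame's lf0
# value and column 42 its binary U/V flag; only the lf0 column is z-scored below.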
src_test_frames[:, 0] = ((src_test_frames[:, 0] - src_train_mean)
                         / src_train_std)
src_test_frames = utils.reshape_lstm(src_test_frames, tsteps, data_dim)

# Zero-pad and reshape target test data
trg_test_frames = utils.reshape_lstm(
    np.column_stack((test_data[:, 83], test_data[:, 85])), tsteps,
    data_dim).reshape(-1, 2)

print('done')

################
# Predict data #
################
print('Predicting')
prediction_test = model.predict(src_test_frames, batch_size=batch_size)
prediction_test = prediction_test.reshape(-1, 2)