Example #1
def test_extract_and_store_features_from_cut_set(cut_set, executor,
                                                 mix_eagerly):
    extractor = Fbank()
    with TemporaryDirectory() as tmpdir, LilcomFilesWriter(tmpdir) as storage:
        with executor() if executor is not None else no_executor() as ex:
            cut_set_with_feats = cut_set.compute_and_store_features(
                extractor=extractor,
                storage=storage,
                mix_eagerly=mix_eagerly,
                executor=ex)

        # The same number of cuts
        assert len(cut_set_with_feats) == 2

        for orig_cut, feat_cut in zip(cut_set, cut_set_with_feats):
            # The ID is retained
            assert orig_cut.id == feat_cut.id
            # Features were attached
            assert feat_cut.has_features
            # Recording is retained unless mixing a MixedCut eagerly
            should_have_recording = not (mix_eagerly
                                         and isinstance(orig_cut, MixedCut))
            assert feat_cut.has_recording == should_have_recording

        cuts = list(cut_set_with_feats)

        arr = cuts[0].load_features()
        assert arr.shape[0] == 100
        assert arr.shape[1] == extractor.feature_dim(cuts[0].sampling_rate)

        arr = cuts[1].load_features()
        assert arr.shape[0] == 300
        assert arr.shape[1] == extractor.feature_dim(cuts[0].sampling_rate)
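
Example #1 calls `executor()` as a context manager and falls back to a `no_executor()` helper, neither of which is shown above. The sketch below illustrates one way those missing pieces might be supplied via pytest parametrization; the concrete parameter values and the `no_executor` helper are assumptions, not the original test's code.

import pytest
from concurrent.futures import ProcessPoolExecutor
from contextlib import contextmanager


@contextmanager
def no_executor():
    # Stand-in context manager that yields None, so the test body can always
    # use a `with ... as ex:` block and pass executor=ex (possibly None).
    yield None


# Hypothetical parametrization; the original decorators are not shown above.
@pytest.mark.parametrize('executor', [None, ProcessPoolExecutor])
@pytest.mark.parametrize('mix_eagerly', [False, True])
def test_extract_and_store_features_from_cut_set(cut_set, executor, mix_eagerly):
    ...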
Example #2
def test_extract_and_store_features(cut):
    extractor = Fbank(FbankConfig(sampling_rate=8000))
    with TemporaryDirectory() as tmpdir, LilcomFilesWriter(tmpdir) as storage:
        cut_with_feats = cut.compute_and_store_features(extractor=extractor,
                                                        storage=storage)
        arr = cut_with_feats.load_features()
    assert arr.shape[0] == 100
    assert arr.shape[1] == extractor.feature_dim(cut.sampling_rate)
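
The expected frame count follows from the cut's duration and the extractor's frame shift. A quick arithmetic sketch, assuming the default 10 ms frame shift of Fbank and the 1.0-second cut implied by the assertion:

# With a 10 ms frame shift, a 1.0-second cut yields 1.0 / 0.01 = 100 frames,
# matching the shape assertion above (both values are assumed, not shown in the test).
frame_shift = 0.01   # seconds, assumed Fbank default
cut_duration = 1.0   # seconds, implied by the 100-frame assertion
assert round(cut_duration / frame_shift) == 100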
Example #3
def test_extract_and_store_features_from_mixed_cut(cut, mix_eagerly):
    mixed_cut = cut.append(cut)
    extractor = Fbank(FbankConfig(sampling_rate=8000))
    with TemporaryDirectory() as tmpdir, LilcomFilesWriter(tmpdir) as storage:
        cut_with_feats = mixed_cut.compute_and_store_features(
            extractor=extractor, storage=storage, mix_eagerly=mix_eagerly)
        arr = cut_with_feats.load_features()
    assert arr.shape[0] == 200
    assert arr.shape[1] == extractor.feature_dim(mixed_cut.sampling_rate)
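
Appending a cut to itself doubles the total duration, which is why the mixed cut produces 200 frames regardless of mix_eagerly. A quick sketch under the same assumed 10 ms frame shift:

# cut.append(cut) concatenates the cut with itself, so a 1.0-second cut
# becomes a 2.0-second mixed cut: 2.0 / 0.01 = 200 frames.
frame_shift = 0.01          # seconds, assumed Fbank default
mixed_duration = 1.0 + 1.0  # seconds: original cut plus the appended copy
assert round(mixed_duration / frame_shift) == 200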
Example #4
def test_feature_set_builder_with_augmentation():
    recordings: RecordingSet = RecordingSet.from_json(
        'test/fixtures/audio.json')
    augment_fn = WavAugmenter.create_predefined('pitch_reverb_tdrop',
                                                sampling_rate=8000)
    extractor = Fbank()
    with TemporaryDirectory() as d, LilcomFilesWriter(d) as storage:
        builder = FeatureSetBuilder(feature_extractor=extractor,
                                    storage=storage,
                                    augment_fn=augment_fn)
        feature_set = builder.process_and_store_recordings(
            recordings=recordings)

        assert len(feature_set) == 6

        feature_infos = list(feature_set)

        # Assert the properties shared by all features
        for features in feature_infos:
            # assert that fbank is the default feature type
            assert features.type == 'fbank'
            # assert that duration is always a multiple of frame_shift
            assert features.num_frames == round(features.duration /
                                                features.frame_shift)
            # assert that num_features is preserved
            assert (features.num_features ==
                    builder.feature_extractor.config.num_mel_bins)
            # assert that the storage type metadata matches
            assert features.storage_type == storage.name
            # assert that the metadata is consistent with the data shapes
            arr = features.load()
            assert arr.shape[0] == features.num_frames
            assert arr.shape[1] == features.num_features

        # Assert the properties for recordings of duration 0.5 seconds
        for features in feature_infos[:2]:
            assert features.num_frames == 50
            assert features.duration == 0.5

        # Assert the properties for recordings of duration 1.0 seconds
        for features in feature_infos[2:]:
            assert features.num_frames == 100
            assert features.duration == 1.0
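
The snippet ends with the assertions, but in practice the resulting FeatureSet manifest is usually persisted for later use. A minimal sketch continuing the example above, assuming Lhotse's standard manifest serialization; the file name is hypothetical:

from lhotse import FeatureSet

# Persist the manifest produced by the builder and read it back.
feature_set.to_json('feature_manifest.json')
restored = FeatureSet.from_json('feature_manifest.json')
assert len(restored) == len(feature_set)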
Example #5
    # Post-condition 2: the iterative method yields very close results to
    # the "standard" method.
    true_means = np.mean(np.concatenate([f.load() for f in feature_set]),
                         axis=0)
    true_stds = np.std(np.concatenate([f.load() for f in feature_set]), axis=0)
    np.testing.assert_almost_equal(stats["norm_means"], true_means, decimal=5)
    np.testing.assert_almost_equal(stats["norm_stds"], true_stds, decimal=5)
    # Post-condition 3: the serialization works correctly
    assert (stats["norm_means"] == read_stats["norm_means"]).all()
    assert (stats["norm_stds"] == read_stats["norm_stds"]).all()


@pytest.mark.parametrize(
    "storage_fn",
    [
        lambda: LilcomFilesWriter(TemporaryDirectory().name),
        lambda: LilcomHdf5Writer(NamedTemporaryFile().name),
        lambda: ChunkedLilcomHdf5Writer(NamedTemporaryFile().name),
        lambda: LilcomChunkyWriter(NamedTemporaryFile().name),
        lambda: NumpyFilesWriter(TemporaryDirectory().name),
        lambda: NumpyHdf5Writer(NamedTemporaryFile().name),
        pytest.param(
            lambda: KaldiWriter(TemporaryDirectory().name),
            marks=pytest.mark.skipif(
                not is_module_available("kaldiio"),
                reason="kaldiio must be installed for scp+ark feature writing",
            ),
        ),
    ],
)
def test_feature_set_builder(storage_fn):