def test_ic_fl_max_ctc():
    """Integrity cues for non-positive or too-small fl1_max_ctc values."""
    # Case 1: shift so the minimum becomes negative -> alert
    data = example_data_dict(size=8472, keys=["fl1_max_ctc"])
    data["fl1_max_ctc"] -= min(data["fl1_max_ctc"]) + 1
    with check.IntegrityChecker(new_dataset(data)) as ic:
        cues = ic.check_fl_max_ctc_positive()
    assert cues[0].level == "alert"
    assert cues[0].category == "feature data"

    # Case 2: shift so the minimum becomes exactly 0.1 -> still an alert
    data = example_data_dict(size=8472, keys=["fl1_max_ctc"])
    data["fl1_max_ctc"] -= min(data["fl1_max_ctc"]) - 0.1
    with check.IntegrityChecker(new_dataset(data)) as ic:
        cues = ic.check_fl_max_ctc_positive()
    assert cues[0].level == "alert"
    assert cues[0].category == "feature data"

    # Case 3: shift so the minimum becomes 1 (> 0.1) -> no cues
    data = example_data_dict(size=8472, keys=["fl1_max_ctc"])
    data["fl1_max_ctc"] -= min(data["fl1_max_ctc"]) - 1
    with check.IntegrityChecker(new_dataset(data)) as ic:
        cues = ic.check_fl_max_ctc_positive()
    assert not cues
def test_ic_expand_section():
    """expand_section=True reports more (per-key) cues than the collapsed form."""
    sample = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds_expanded = new_dataset(sample)
    ds_collapsed = new_dataset(sample)
    with check.IntegrityChecker(ds_expanded) as ic:
        cues_expanded = ic.check_metadata_missing(expand_section=True)
    with check.IntegrityChecker(ds_collapsed) as ic:
        cues_collapsed = ic.check_metadata_missing(expand_section=False)
    assert len(cues_expanded) > len(cues_collapsed)
# Example #3
def test_user_section_set_save_reload_empty_dict():
    """The 'user' config section as an empty dict will not save"""
    h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
    expath = h5path.with_name("exported.rtdc")
    with new_dataset(h5path) as ds:
        ds.config.update({"user": {}})
        ds.export.hdf5(expath, features=ds.features_innate)
    # nothing "user"-like should have been written to the HDF5 attributes
    with h5py.File(expath, "r") as h5:
        assert not any(ak.startswith("user") for ak in h5.attrs)
    # dclab still exposes an (empty) "user" section on reload
    with new_dataset(expath) as ds2:
        assert ds2.config["user"] == {}
# Example #4
def test_contour_from_hdf5():
    """Contours stored via RTDCWriter survive an HDF5 round trip."""
    ds1 = new_dataset(retrieve_data("fmt-hdf5_image-bg_2020.zip"))
    # NOTE(review): contours are ragged, so the middle axis is reported as
    # np.nan — presumably the tuple comparison passes because both sides
    # hold the same np.nan object (identity fast path); confirm against
    # dclab's contour feature implementation.
    assert ds1["contour"].shape == (5, np.nan, 2)

    # write deform + contour (and minimal metadata) to a fresh .rtdc file
    edest = tempfile.mkdtemp()
    f1 = join(edest, "dclab_test_export_hdf5_image.rtdc")
    with RTDCWriter(f1) as hw:
        hw.store_metadata({"setup": ds1.config["setup"],
                           "experiment": ds1.config["experiment"]})
        hw.store_feature("deform", ds1["deform"])
        hw.store_feature("contour", ds1["contour"])

    # the re-loaded dataset reports the same ragged shape
    ds2 = new_dataset(f1)
    assert ds2["contour"].shape == (5, np.nan, 2)
# Example #5
def test_lower_case_conversion_issue_139():
    """Check that chip region is lower-case"""
    path = retrieve_data("fmt-hdf5_polygon_gate_2021.zip")
    # write an upper-case region string directly into the HDF5 attributes
    with h5py.File(path, "a") as h5:
        h5.attrs["setup:chip region"] = "Channel"
    # dclab normalizes the value to lower-case on load
    with new_dataset(path) as ds:
        assert ds.config["setup"]["chip region"] == "channel"
# Example #6
def test_user_section_exists():
    """Check that the user config section exists"""
    dataset = new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
    # "user" is always present (empty by default) ...
    assert dataset.config["user"] == {}
    # ... while arbitrary section names raise KeyError
    with pytest.raises(KeyError):
        dataset.config["Oh I seem to have lost my key"]
# Example #7
def test_user_section_set_save_reload_fmt_dcor():
    """Check that 'user' section metadata works for RTDC_DCOR"""
    # temp directory for the exported file
    tpath = pathlib.Path(tempfile.mkdtemp())
    expath = tpath / "exported.rtdc"
    metadata = {"some metadata": 12}
    with new_dataset("fb719fb2-bd9f-817a-7d70-f4002af916f0") as ds:
        ds.config.update({"user": metadata})
        assert ds.config["user"] == metadata
        ds.export.hdf5(expath, features=["deform", "area_um"])
    # the attribute landed in the HDF5 file ...
    with h5py.File(expath, "r") as h5:
        assert h5.attrs["user:some metadata"] == 12
    # ... and round-trips through dclab
    with new_dataset(expath) as ds2:
        assert ds2.config["user"] == metadata
# Example #8
def test_filter_manual():
    """Manually disabling the first and last event removes exactly those."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    # drop event 0 and the final event (index 8471)
    ds.filter.manual[[0, 8471]] = False
    ds.apply_filter()
    remaining = ds["deform"][ds.filter.all]
    assert len(remaining) == 8470
    # event 1 is now the first surviving event
    assert remaining[0] == ds["deform"][1]
def test_ic_fl_metadata_channel_names():
    """A trace without a configured channel name yields a 'missing' cue."""
    data = example_data_dict(size=8472, keys=["area_um", "deform", "fl1_max"])
    data["trace"] = {"fl1_raw": [range(10)] * 1000}
    with check.IntegrityChecker(new_dataset(data)) as ic:
        cues = ic.check_fl_metadata_channel_names()
    cue = cues[0]
    assert cue.category == "metadata missing"
    assert cue.cfg_section == "fluorescence"
    assert cue.cfg_key == "channel 1 name"
# Example #10
def test_disable_filters():
    """Disabling the filters should only affect RTDCBase.filter.all"""
    ddict = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(ddict)
    ds.filter.manual[[0, 8471]] = False
    ds.apply_filter()
    ds.config["filtering"]["enable filters"] = False
    ds.apply_filter()
    # np.alltrue was deprecated in NumPy 1.25 and removed in NumPy 2.0;
    # np.all is the supported equivalent.
    assert np.all(ds.filter.all)
def test_temperature():
    """A 'temp' feature without [setup] 'temperature' metadata alerts."""
    # there are probably a million things wrong with this dataset, but
    # we are only looking for the temperature thing
    data = example_data_dict(size=8472, keys=["area_um", "deform", "temp"])
    ds = new_dataset(data)
    expected = "Metadata: Missing key [setup] 'temperature', " \
               + "because the 'temp' feature is given"
    _, alerts, _ = check_dataset(ds)
    assert expected in alerts
def test_shapein_issue3_bad_medium_control(si_version):
    """Control: no medium cue for the given Shape-In software version.

    NOTE(review): this test takes an ``si_version`` argument but no
    ``pytest.mark.parametrize`` decorator is visible here — confirm the
    decorator was not lost when this snippet was extracted.
    """
    h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
    # inject the software version and a CellCarrierB medium into the file
    with h5py.File(h5path, "a") as h5:
        h5.attrs["setup:software version"] = si_version
        h5.attrs["setup:medium"] = "CellCarrierB"
    ds = new_dataset(h5path)
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_shapein_issue3_bad_medium()
        # this version is not affected, so no cue is emitted
        assert len(cues) == 0
def test_ic_fl_metadata_channel_names_2():
    """A channel name configured without any trace data is 'invalid'."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    ds.config["fluorescence"]["channel 1 name"] = "peter"
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_fl_metadata_channel_names()
    cue = cues[0]
    assert cue.category == "metadata invalid"
    assert cue.cfg_section == "fluorescence"
    assert cue.cfg_key == "channel 1 name"
def test_ic_metadata_choices_medium():
    """An arbitrary (non-empty) medium string is accepted."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    ds.config["setup"]["medium"] = "honey"
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_metadata_choices()
    # changed in 0.29.1: medium can now be an arbitrary string
    # except for an empty string.
    assert not cues
def test_ic_flow_rate_not_zero():
    """A flow rate of zero is flagged as wrong metadata."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    ds.config["setup"]["flow rate"] = 0
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_metadata_bad_greater_zero()
    cue = cues[0]
    assert cue.category == "metadata wrong"
    assert cue.cfg_section == "setup"
    assert cue.cfg_key == "flow rate"
def test_ic_fl_num_channels():
    """A channel count that disagrees with the named channels is flagged."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    # claim three channels but name only one
    ds.config["fluorescence"]["channel count"] = 3
    ds.config["fluorescence"]["channel 1 name"] = "hans"
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_fl_num_channels()
    cue = cues[0]
    assert cue.category == "metadata wrong"
    assert cue.cfg_section == "fluorescence"
    assert cue.cfg_key == "channel count"
def test_ic_metadata_bad():
    """Wrong metadata types are already caught by the Configuration class."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    # Since version 0.35, metadata are checked in `Configuration` class
    with pytest.warns(dclab.rtdc_dataset.config.WrongConfigurationTypeWarning,
                      match="run index"):
        ds.config["experiment"]["run index"] = "1"
    # the integrity checker therefore has nothing left to report
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_metadata_bad()
    assert not cues
def test_ic_invalid_dataset():
    """IntegrityChecker.check() refuses hierarchy and filtered datasets."""
    # a hierarchy child cannot be checked
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    parent = new_dataset(data)
    child = new_dataset(parent)
    with check.IntegrityChecker(child) as ic:
        with pytest.raises(NotImplementedError):
            ic.check()

    # a raw dataset with filters applied cannot be checked either
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    ds.config["filtering"]["area_um max"] = 100
    ds.config["filtering"]["area_um min"] = 1
    ds.apply_filter()
    with check.IntegrityChecker(ds) as ic:
        with pytest.raises(NotImplementedError):
            ic.check()
# Example #19
def test_user_section_basic():
    """Arbitrary key/value pairs can be stored in the 'user' section."""
    ds = new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
    metadata = dict()
    metadata["channel area"] = 100.5
    metadata["inlet"] = True
    metadata["n_constrictions"] = 3
    metadata["channel information"] = "other information"
    ds.config.update({"user": metadata})
    assert ds.config["user"] == metadata
def test_ml_class():
    """Test score data outside boundary"""
    scores = {
        "ml_score_001": [.1, 10, -10, 0.01, .89],  # contains out-of-range values
        "ml_score_002": [.2, .1, .4, 0, .4],       # all within [0, 1]
    }
    with check.IntegrityChecker(new_dataset(scores)) as ic:
        cues = ic.check_ml_class()
        # only the out-of-range feature is flagged
        assert len(cues) == 1
        assert "ml_score_001" in cues[0].msg
def test_ic_fl_num_lasers():
    """A laser count that disagrees with the configured lasers is flagged."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    # claim three lasers but configure only one
    ds.config["fluorescence"]["laser count"] = 3
    ds.config["fluorescence"]["laser 1 lambda"] = 550
    ds.config["fluorescence"]["laser 1 power"] = 20
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_fl_num_lasers()
    cue = cues[0]
    assert cue.category == "metadata wrong"
    assert cue.cfg_section == "fluorescence"
    assert cue.cfg_key == "laser count"
# Example #22
def test_index_increment():
    """Storing the 'index' feature twice appends a continuing 1-based range."""
    # tempfile.mktemp is deprecated and race-prone; create a private
    # directory instead and build the (not yet existing) file path inside it.
    rtdc_file = join(tempfile.mkdtemp(prefix="dclab_test_error_"),
                     "test.rtdc")
    with RTDCWriter(rtdc_file) as hw:
        hw.store_metadata({"experiment": {"sample": "test",
                                          "run index": 1}})
        # writing "index" twice must extend it, not overwrite it
        hw.store_feature("index", np.arange(1, 11))
        hw.store_feature("index", np.arange(1, 11))

    with new_dataset(rtdc_file) as ds:
        assert np.all(ds["index"] == np.arange(1, 21))
# Example #23
def test_user_section_set_save_reload_fmt_hdf5_containers():
    """Check that 'user' section metadata works for container data types"""
    h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
    inlet = (1, 20, 40)
    # list, tuple and ndarray values should all round-trip
    metadata = {"channel area": [0, 100],
                "inlet": inlet,
                "outlet": np.array(inlet)}
    with new_dataset(h5path) as ds:
        ds.config.update({"user": metadata})
        expath = h5path.with_name("exported.rtdc")
        ds.export.hdf5(expath, features=ds.features_innate)
    # the containers were written as HDF5 attributes
    with h5py.File(expath, "r") as h5:
        for key, value in metadata.items():
            assert all(h5.attrs["user:" + key] == value)
    # ... and are recovered by dclab
    with new_dataset(expath) as ds2:
        for key, value in metadata.items():
            assert all(value == ds2.config["user"][key])
# Example #24
def test_user_section_set_and_overwrite():
    """Add information to the user section of config via dict.__setitem__"""
    ds = new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
    ds.config["user"]["some metadata"] = 42
    assert ds.config["user"] == {"some metadata": 42}
    # config.update merges into the section rather than replacing it
    ds.config.update({"user": {"more metadata": True}})
    assert ds.config["user"] == {"some metadata": 42, "more metadata": True}
    # assigning a fresh dict wipes the previous keys and values
    ds.config["user"] = {}
    assert ds.config["user"] == {}
def test_shapein_issue3_bad_medium(si_version):
    """Shape-In issue 3: "CellCarrierB" medium yields a 'metadata wrong' cue.

    NOTE(review): ``si_version`` looks like a parametrized argument, but no
    ``pytest.mark.parametrize`` decorator is visible in this snippet —
    confirm it was not lost during extraction.
    """
    h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
    # inject the affected software version and medium into the file
    with h5py.File(h5path, "a") as h5:
        h5.attrs["setup:software version"] = si_version
        h5.attrs["setup:medium"] = "CellCarrierB"
    ds = new_dataset(h5path)
    with check.IntegrityChecker(ds) as ic:
        cues = ic.check_shapein_issue3_bad_medium()
        # exactly one cue pointing at [setup] "medium"
        assert len(cues) == 1
        assert cues[0].cfg_key == "medium"
        assert cues[0].cfg_section == "setup"
        assert cues[0].category == "metadata wrong"
# Example #26
def test_only_one_boundary_error():
    """Setting only the min of a box filter must raise ValueError."""
    ddict = example_data_dict(size=8472, keys=["area_um", "deform"])
    ddict["area_um"][[1, 4, 6]] = np.nan
    ds = new_dataset(ddict)
    amin, amax = np.nanmin(ds["area_um"]), np.nanmax(ds["area_um"])
    # set only "area_um min"; the matching "max" is missing on purpose
    ds.config["filtering"]["area_um min"] = (amax + amin) / 2
    # pytest.raises replaces the manual try/except/else/assert-False idiom
    # and fails the test automatically if no ValueError is raised
    with pytest.raises(ValueError):
        ds.apply_filter()
# Example #27
def test_config_invalid_key():
    """Setting an unknown config key emits UnknownConfigurationKeyWarning."""
    pytest.importorskip("nptdms")
    ds = new_dataset(retrieve_data("fmt-tdms_fl-image_2016.zip"))
    with warnings.catch_warnings(record=True) as caught:
        # make sure the warning is not suppressed by earlier filters
        warnings.simplefilter("always")
        # trigger the warning
        ds.config["setup"]["invalid_key"] = "picard"
        # exactly one warning of the expected class, mentioning the key
        assert len(caught) == 1
        last = caught[-1]
        assert issubclass(last.category, dccfg.UnknownConfigurationKeyWarning)
        assert "invalid_key" in str(last.message)
# Example #28
def test_user_section_set_save_reload_fmt_dict():
    """Check that 'user' section metadata works for RTDC_Dict"""
    # create temp directory for storing outputted file
    tpath = pathlib.Path(tempfile.mkdtemp())
    ddict = example_data_dict(size=67, keys=["area_um", "deform"])
    ds = new_dataset(ddict)
    metadata = {"some metadata": 42}
    ds.config.update({"user": metadata})
    assert ds.config["user"] == metadata
    # must add some metadata to the "experiment" key for loading with dclab
    ds.config["experiment"]["sample"] = "test"
    ds.config["experiment"]["run index"] = 1
    expath = tpath / "exported.rtdc"
    # `with expath as exp:` relied on pathlib.Path's context-manager
    # protocol, which was deprecated in Python 3.11 and removed in 3.13;
    # pass the path directly instead.
    ds.export.hdf5(expath, features=["deform", "area_um"])
    # make sure that worked
    with h5py.File(expath, "r") as h5:
        assert h5.attrs["user:some metadata"] == 42
    # check again with dclab
    with new_dataset(expath) as ds2:
        assert ds2.config["user"] == metadata
def test_config_save_load():
    """A saved configuration reloads equal to the original (tdms data).

    NOTE(review): a second ``test_config_save_load`` defined later in this
    file shadows this one, so pytest only collects the later definition —
    one of the two should be renamed.
    """
    # Download and extract data
    tdms_path = retrieve_data(example_data_sets[0])
    ds = new_dataset(tdms_path)
    # tempfile.mktemp is deprecated and race-prone; mkstemp safely creates
    # the file and hands back a path that Configuration.save can overwrite.
    fd, cfg_file = tempfile.mkstemp(prefix="test_dclab_rtdc_config_")
    os.close(fd)
    try:
        ds.config.save(cfg_file)
        loaded = Configuration(files=[cfg_file])
        assert equals(loaded, ds.config)
        cleanup()
    finally:
        # always remove the temporary config file
        try:
            os.remove(cfg_file)
        except OSError:
            pass
# Example #30
def test_config_save_load():
    """A saved configuration reloads equal to the original (minimal tdms).

    NOTE(review): this redefines ``test_config_save_load`` from earlier in
    this file, shadowing it at module level — one of the two should be
    renamed so both run.
    """
    pytest.importorskip("nptdms")
    # Download and extract data
    tdms_path = retrieve_data("fmt-tdms_minimal_2016.zip")
    ds = new_dataset(tdms_path)
    # tempfile.mktemp is deprecated and race-prone; mkstemp safely creates
    # the file and hands back a path that Configuration.save can overwrite.
    fd, cfg_file = tempfile.mkstemp(prefix="test_dclab_rtdc_config_")
    os.close(fd)
    try:
        ds.config.save(cfg_file)
        loaded = dccfg.Configuration(files=[cfg_file])
        assert equals(loaded, ds.config)
    finally:
        # always remove the temporary config file
        try:
            os.remove(cfg_file)
        except OSError:
            pass
# Example #31
def test_user_section_set_with_update():
    """Add information to the user section of config with .update"""
    ds = new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
    ds.config.update({"user": {"some metadata": 42}})
    assert ds.config["user"] == {"some metadata": 42}

    # updating the section dict itself merges in the new key
    ds.config["user"].update({"channel information": "information"})
    expected = {
        "some metadata": 42,
        "channel information": "information",
    }
    assert ds.config["user"] == expected
def test_filter_min_max():
    """Box filters only take effect once apply_filter() is called."""
    data = example_data_dict(size=8472, keys=["area_um", "deform"])
    ds = new_dataset(data)
    amin, amax = ds["area_um"].min(), ds["area_um"].max()
    ds.config["filtering"]["area_um min"] = (amax + amin) / 2
    ds.config["filtering"]["area_um max"] = amax
    ds.apply_filter()
    assert np.sum(ds.filter.all) == 4256

    # setting new limits alone must not change the active filter ...
    dmin, dmax = ds["deform"].min(), ds["deform"].max()
    ds.config["filtering"]["deform min"] = (dmin + dmax) / 2
    ds.config["filtering"]["deform max"] = dmax
    # ... because ds.apply_filter() has not been called again
    assert np.sum(ds.filter.all) == 4256
def test_config_basic():
    """The imaging ROI height is read from the dataset metadata."""
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    assert ds.config["imaging"]["roi size y"] == 96.0
    cleanup()