def test_task_compress_and_repack_strip_issue19(qtbot, monkeypatch):
    """Same test as above, only tests whether repack and strip logs works"""
    path = retrieve_data("rtdc_data_hdf5_rtfdc.zip")
    path_out = path.with_name("compressed")
    path_out.mkdir()
    # Monkeypatch
    monkeypatch.setattr(QDialog, "exec_", lambda *args: QMessageBox.Ok)
    monkeypatch.setattr(QMessageBox, "exec_", lambda *args: QMessageBox.Ok)
    monkeypatch.setattr(QFileDialog, "getExistingDirectory",
                        lambda *args: str(path_out))

    mw = DCKit(check_update=False)
    qtbot.addWidget(mw)
    mw.append_paths([path])
    mw.checkBox_repack.setChecked(True)
    pouts, invalid = mw.on_task_compress()
    assert len(pouts) == 1
    assert len(invalid) == 0
    with dclab.new_dataset(pouts[0]) as ds, dclab.new_dataset(path) as ds0:
        assert len(ds) == len(ds0)
        assert len(ds.logs) == 0
        scf = list(set(ds.features_scalar + ds0.features_scalar))
        for feat in scf:
            assert feat in ds0
            assert feat in ds
            assert np.all(ds[feat] == ds0[feat])
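
# A reusable sketch (not part of the original suite) of the dialog-patching
# pattern above: replacing the blocking Qt dialogs lets the GUI task run
# unattended under pytest. Assumes the same QDialog/QMessageBox/QFileDialog
# classes imported by this module.
def _patch_dialogs_sketch(monkeypatch, directory):
    monkeypatch.setattr(QDialog, "exec_", lambda *args: QMessageBox.Ok)
    monkeypatch.setattr(QMessageBox, "exec_", lambda *args: QMessageBox.Ok)
    monkeypatch.setattr(QFileDialog, "getExistingDirectory",
                        lambda *args: str(directory))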
def test_tdms2rtdc_bulk():
    pytest.importorskip("nptdms")
    path_data = retrieve_data("fmt-tdms_shapein-2.0.1-no-image_2017.zip")
    path_wd = pathlib.Path(
        tempfile.mkdtemp(prefix="tdms2rtdc_bulk_")).resolve()
    path_in = path_wd / "input"
    path_in.mkdir()
    shutil.copytree(path_data.parent, path_in / "data_1")
    shutil.copytree(path_data.parent, path_in / "data_2")
    shutil.copytree(path_data.parent, path_in / "data_3")
    (path_in / "data_nested").mkdir()
    shutil.copytree(path_data.parent, path_in / "data_nested" / "data_4")
    # output directory in the same temporary tree (cleaned up with path_wd)
    path_out = path_wd / "output"
    path_out.mkdir()

    cli.tdms2rtdc(path_tdms=path_in,
                  path_rtdc=path_out,
                  compute_features=False)

    for pp in [path_out / "data_1" / "M1_data.rtdc",
               path_out / "data_2" / "M1_data.rtdc",
               path_out / "data_3" / "M1_data.rtdc",
               path_out / "data_nested" / "data_4" / "M1_data.rtdc"]:
        assert pp.exists()

        with new_dataset(pp) as ds2, new_dataset(path_data) as ds1:
            assert len(ds2)
            assert set(ds1.features) == set(ds2.features)
            # not all features are computed
            assert set(ds2._events.keys()) < set(ds1.features)
            for feat in ds1:
                assert np.all(ds1[feat] == ds2[feat])
def test_af_emodulus_visc_only_2():
    keys = ["area_um", "deform"]
    ddict = example_data_dict(size=8472, keys=keys)
    visc = dclab.features.emodulus.viscosity.get_viscosity(
        medium="CellCarrier",
        channel_width=30,
        flow_rate=0.16,
        temperature=23.0)
    # legacy
    ds = dclab.new_dataset(ddict)
    ds.config["setup"]["flow rate"] = 0.16
    ds.config["setup"]["channel width"] = 30
    ds.config["imaging"]["pixel size"] = .34
    ds.config["calculation"] = {"emodulus lut": "LE-2D-FEM-19",
                                "emodulus medium": "other",
                                "emodulus temperature": 47.0,  # irrelevant
                                "emodulus viscosity": visc
                                }
    # visc-only
    ds2 = dclab.new_dataset(ddict)
    ds2.config["setup"]["flow rate"] = 0.16
    ds2.config["setup"]["channel width"] = 30
    ds2.config["imaging"]["pixel size"] = .34
    ds2.config["calculation"] = {"emodulus lut": "LE-2D-FEM-19",
                                 "emodulus viscosity": visc
                                 }
    assert np.sum(~np.isnan(ds["emodulus"])) > 0
    assert np.allclose(ds["emodulus"], ds2["emodulus"], equal_nan=True,
                       rtol=0, atol=1e-15)
def test_af_emodulus_reservoir():
    """Reservoir measurements should not have emodulus"""
    keys = ["area_um", "deform"]
    ddict = example_data_dict(size=8472, keys=keys)
    # legacy
    ds = dclab.new_dataset(ddict)
    ds.config["setup"]["flow rate"] = 0.16
    ds.config["setup"]["channel width"] = 30
    ds.config["imaging"]["pixel size"] = .34
    ds.config["calculation"] = {"emodulus lut": "LE-2D-FEM-19",
                                "emodulus medium": "CellCarrier",
                                "emodulus temperature": 23.0,
                                "emodulus viscosity": 0.5
                                }
    assert "emodulus" in ds
    ds2 = dclab.new_dataset(ddict)
    ds2.config["setup"]["flow rate"] = 0.16
    ds2.config["setup"]["channel width"] = 30
    ds2.config["imaging"]["pixel size"] = .34
    ds2.config["calculation"] = {"emodulus lut": "LE-2D-FEM-19",
                                 "emodulus medium": "CellCarrier",
                                 "emodulus temperature": 23.0,
                                 "emodulus viscosity": 0.5
                                 }
    ds2.config["setup"]["chip region"] = "reservoir"
    assert "emodulus" not in ds2
def test_logs():
    path_in = retrieve_data("fmt-hdf5_mask-contour_2018.zip")

    with new_dataset(path_in) as ds:
        assert not ds.logs

    # write some logs
    with h5py.File(path_in, "a") as h5:
        hw = rtdc_dataset.RTDCWriter(h5)
        hw.store_log("test_log", ["peter", "hans"])

    with new_dataset(path_in) as ds:
        assert ds.logs
        assert ds.logs["test_log"][0] == "peter"

    # remove logs
    with h5py.File(path_in, "a") as h5:
        del h5["logs"]

    with new_dataset(path_in) as ds:
        assert not ds.logs
        try:
            ds.logs["test_log"]
        except KeyError:  # no log data
            pass
        else:
            assert False, "removed log should raise KeyError"
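
# A minimal sketch of the log round-trip exercised above, assuming `path`
# points to an existing .rtdc file: RTDCWriter appends the lines under
# /logs/<name>, and they come back as a list of strings.
def _log_roundtrip_sketch(path):
    with h5py.File(path, "a") as h5:
        rtdc_dataset.RTDCWriter(h5).store_log("processing", ["step 1"])
    with new_dataset(path) as ds:
        assert ds.logs["processing"][0] == "step 1"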
def test_register_external_lut_and_get_emodulus():
    keys = ["area_um", "deform"]
    ddict = example_data_dict(size=8472, keys=keys)
    # from internal LUT
    ds = dclab.new_dataset(ddict)
    ds.config["setup"]["flow rate"] = 0.16
    ds.config["setup"]["channel width"] = 30
    ds.config["imaging"]["pixel size"] = .34
    ds.config["calculation"] = {"emodulus lut": "LE-2D-FEM-19",
                                "emodulus medium": "CellCarrier",
                                "emodulus temperature": 23.0
                                }
    assert np.sum(~np.isnan(ds["emodulus"])) > 0
    # from external LUT
    identifier = "test-test_register_external_lut"
    path = resource_filename("dclab.features.emodulus",
                             emodulus.load.INTERNAL_LUTS["LE-2D-FEM-19"])
    emodulus.register_lut(path, identifier=identifier)
    ds2 = dclab.new_dataset(ddict)
    ds2.config["setup"]["flow rate"] = 0.16
    ds2.config["setup"]["channel width"] = 30
    ds2.config["imaging"]["pixel size"] = .34
    ds2.config["calculation"] = {"emodulus lut": identifier,
                                 "emodulus medium": "CellCarrier",
                                 "emodulus temperature": 23.0
                                 }
    assert np.sum(~np.isnan(ds2["emodulus"])) > 0
    assert np.allclose(ds["emodulus"], ds2["emodulus"], equal_nan=True,
                       rtol=0, atol=1e-15)
def test_export_and_load():
    h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
    # register temporary feature
    dclab.register_temporary_feature(feature="fl1_mean")

    with dclab.new_dataset(h5path) as ds:
        # extract the feature information from the dataset
        fl1_mean = np.array(
            [np.mean(ds["trace"]["fl1_raw"][ii]) for ii in range(len(ds))])
        # set the data
        dclab.set_temporary_feature(rtdc_ds=ds,
                                    feature="fl1_mean",
                                    data=fl1_mean)
        # export the data to a new file
        expath = h5path.with_name("exported.rtdc")
        ds.export.hdf5(expath, features=ds.features_innate + ["fl1_mean"])

    # make sure that worked
    with h5py.File(expath, "r") as h5:
        assert "fl1_mean" in h5["events"]
        assert np.allclose(h5["events"]["fl1_mean"], fl1_mean)

    # now check again with dclab
    with dclab.new_dataset(expath) as ds2:
        assert "fl1_mean" in ds2
        assert np.allclose(ds2["fl1_mean"], fl1_mean)

        # and a control check
        deregister_all()
        assert "fl1_mean" not in ds2
def test_manual_exclude():
    data = example_data_dict(42, keys=["area_um", "deform"])
    p = new_dataset(data)
    c1 = new_dataset(p)
    c2 = new_dataset(c1)
    c3 = new_dataset(c2)
    c1.filter.manual[0] = False
    c2.apply_filter()
    c2.filter.manual[1] = False
    c3.apply_filter()

    # simple exclusion of few events
    assert len(c3) == len(p) - 2

    # removing the same event in the parent removes it from the
    # child altogether, including its manual filter
    c3.filter.manual[0] = False
    c2.filter.manual[0] = False
    c3.apply_filter()
    assert np.all(c3.filter.manual)

    # reinserting the event in the parent restores the
    # manual filter in the child
    c2.filter.manual[0] = True
    c3.apply_filter()
    assert not c3.filter.manual[0]
def test_get_min_max_inf():
    # generate fake dataset
    path = pathlib.Path(__file__).parent / "data" / "calibration_beads_47.rtdc"
    with dclab.new_dataset(path) as ds:
        config = copy.deepcopy(ds.config)

    tmp = tempfile.mktemp(".rtdc", prefix="example_filter_inf_")
    ds2 = dclab.new_dataset({"deform": np.linspace(0, .01, 100),
                             "area_um": np.linspace(20, 200, 100),
                             "area_ratio": np.linspace(1, 1.1, 100)
                             })
    ds2.config.update(config)
    ds2["area_ratio"][0] = np.inf
    ds2["area_ratio"][1] = np.nan
    ds2.export.hdf5(tmp, features=["area_um", "deform", "area_ratio"])

    # initiate the pipeline
    pl = pipeline.Pipeline()
    pl.add_slot(path=tmp)
    pl.add_filter()

    # get the current min/max values
    amin, amax = pl.get_min_max("area_ratio")
    assert amin == ds2["area_ratio"][2]
    assert amax == 1.1

    try:
        pathlib.Path(tmp).unlink()
    except BaseException:
        pass
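
# Why index 2 is the expected minimum above: entries 0 and 1 were set to
# inf/nan, and the pipeline is expected to ignore non-finite values,
# conceptually like this numpy sketch:
def _finite_min_max_sketch(values):
    valid = values[np.isfinite(values)]
    return valid.min(), valid.max()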
def test_dcor_hierarchy(monkeypatch):
    monkeypatch.setattr(dclab.rtdc_dataset.fmt_dcor,
                        "APIHandler",
                        MockAPIHandler)
    dso = dclab.new_dataset("https://example.com/api/3/action/dcserv?id=1")
    dsh = dclab.new_dataset(dso)
    assert np.all(dso["area_um"] == dsh["area_um"])
def test_hierarchy_shape_mask():
    ds = new_dataset(retrieve_data("fmt-hdf5_image-bg_2020.zip"))
    ds.filter.manual[0] = False
    ch = new_dataset(ds)
    assert "mask" in ch.features_innate
    assert len(ch["mask"]) == 4
    assert ch["mask"].shape == (4, 80, 250)
def test_hdf5_traces_filter2():
    """Length of traces was wrong when filters were applied #112

    Test dataset length with trace data only.
    """
    ds = new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))

    # applying some filters
    ds.config["filtering"]["deform min"] = 0.01
    ds.config["filtering"]["deform max"] = 0.1
    ds.apply_filter()

    # sanity check
    assert np.sum(ds.filter.all) == 3

    edest = tempfile.mkdtemp()
    f1 = join(edest, "test.rtdc")
    ds.export.hdf5(f1, ["deform", "trace"])

    ds2 = new_dataset(f1)
    assert len(ds2) == 3
    assert len(ds2["deform"]) == 3
    assert len(ds2["trace"]["fl1_median"]) == 3
    assert np.all(ds["trace"]["fl1_raw"][3] == ds2["trace"]["fl1_raw"][0])
    assert np.all(ds["trace"]["fl1_raw"][5] == ds2["trace"]["fl1_raw"][1])
    assert np.all(ds["trace"]["fl1_raw"][6] == ds2["trace"]["fl1_raw"][2])
def test_pf_export_and_load():
    """Check that exported and loaded hdf5 file will keep a plugin feature"""
    h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
    # initialize PlugInFeature instance
    info = example_plugin_info_single_feature()
    pf = PlugInFeature("circ_per_area", info)

    with dclab.new_dataset(h5path) as ds:
        # extract the feature information from the dataset
        assert pf in PlugInFeature.features
        circ_per_area = ds[pf.feature_name]

        # export the data to a new file
        expath = h5path.with_name("exported.rtdc")
        ds.export.hdf5(expath, features=ds.features_innate + [pf.feature_name])

    # make sure that worked
    with h5py.File(expath, "r") as h5:
        assert pf.feature_name in h5["events"]
        assert np.allclose(h5["events"][pf.feature_name], circ_per_area)

    # now check again with dclab
    with dclab.new_dataset(expath) as ds2:
        assert pf in PlugInFeature.features
        assert pf.feature_name in ds2
        assert pf.feature_name in ds2.features_innate
        assert np.allclose(ds2[pf.feature_name], circ_per_area)

        # and a control check
        remove_plugin_feature(pf)
        assert pf.feature_name not in ds2
def test_pf_export_non_scalar_filtered_from_file_issue_166():
    h5path = retrieve_data("fmt-hdf5_image-bg_2020.zip")
    expath = h5path.with_name("exported.rtdc")
    # initialize PlugInFeature instance
    info = example_plugin_info_non_scalar_feature()
    info["feature shapes"] = [(80, 250)]
    pf = PlugInFeature("image_gauss_filter", info)

    # write the plugin feature data to an HDF5 file
    with dclab.new_dataset(h5path) as ds:
        # extract the feature information from the dataset
        ds.export.hdf5(expath, features=[pf.feature_name])

    # remove all plugin features and work with temporary feature
    remove_all_plugin_features()

    # try to load the plugin feature data from that HDF5 file and
    # export it again (this time, the exporter has to get the data
    # from the H5File object).
    expath2 = h5path.with_name("exported2.rtdc")
    dclab.register_temporary_feature("image_gauss_filter", is_scalar=False)
    with dclab.new_dataset(expath) as ds2:
        ds2.export.hdf5(expath2,
                        features=["image_gauss_filter"],
                        filtered=True)

    # make sure that worked
    with h5py.File(expath2, "r") as h5:
        assert "image_gauss_filter" in h5["events"]
def test_af_ml_class_bad_feature():
    data = {
        "ml_score_0-1": [.1, .3, .1, 0.01, .59],
    }
    with pytest.raises(ValueError,
                       match="Invalid feature name 'ml_score_0-1'"):
        dclab.new_dataset(data)
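
# The hyphen makes the feature name invalid. A sketch of a name that should
# be accepted (ml_score features take a short lowercase alphanumeric
# suffix; the exact rule lives in dclab.definitions):
def _valid_ml_score_sketch():
    ds = dclab.new_dataset({"ml_score_001": [.1, .3, .1, .01, .59]})
    assert "ml_score_001" in ds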
def test_tdms2rtdc_update_roi_size():
    pytest.importorskip("nptdms")
    path_in = retrieve_data("fmt-tdms_fl-image_2016.zip")
    # set wrong roi sizes
    camin = path_in.with_name("M1_camera.ini")
    with camin.open("r") as fd:
        lines = fd.readlines()
    lines = lines[:-2]
    lines.append("width = 23\n")
    lines.append("height = 24\n")
    with camin.open("w") as fd:
        fd.writelines(lines)

    # same directory (will be cleaned up with path_in)
    path_out = path_in.with_name("out.rtdc")

    cli.tdms2rtdc(path_tdms=path_in,
                  path_rtdc=path_out,
                  compute_features=False,
                  skip_initial_empty_image=True)

    with new_dataset(path_out) as dsj, new_dataset(path_in) as ds0:
        assert ds0.config["imaging"]["roi size x"] == 23
        assert ds0.config["imaging"]["roi size y"] == 24
        assert dsj.config["imaging"]["roi size x"] == 256
        assert dsj.config["imaging"]["roi size y"] == 96
        wlog = "dclab-tdms2rtdc-warnings"
        assert "LimitingExportSizeWarning" in dsj.logs[wlog]
def test_hierarchy_shape_contour():
    ds = new_dataset(retrieve_data("fmt-hdf5_image-bg_2020.zip"))
    assert ds["contour"].shape == (5, np.nan, 2)
    ds.filter.manual[0] = False
    ch = new_dataset(ds)
    assert ch["contour"].shape == (4, np.nan, 2)
    assert len(ch["contour"]) == 4
def test_join_frame():
    path_in1 = retrieve_data("fmt-hdf5_mask-contour_2018.zip")
    path_in2 = retrieve_data("fmt-hdf5_mask-contour_2018.zip")
    # same directory (will be cleaned up with path_in)
    path_out = path_in1.with_name("out.rtdc")

    # modify acquisition times
    with h5py.File(path_in1, mode="a") as h1:
        h1.attrs["experiment:date"] = "2019-11-04"
        h1.attrs["experiment:time"] = "15:00:00"

    with h5py.File(path_in2, mode="a") as h2:
        h2.attrs["experiment:date"] = "2019-11-05"
        h2.attrs["experiment:time"] = "16:01:15.050"

    offset = 24 * 60 * 60 + 60 * 60 + 1 * 60 + 15 + .05

    cli.join(path_out=path_out, paths_in=[path_in1, path_in2])
    with new_dataset(path_out) as dsj, new_dataset(path_in1) as ds0:
        fr = ds0.config["imaging"]["frame rate"]
        assert np.allclose(dsj["frame"],
                           np.concatenate((ds0["frame"],
                                           ds0["frame"] + offset * fr)),
                           rtol=0,
                           atol=.0001)
def test_manual_exclude_parent_changed():
    data = example_data_dict(42, keys=["area_um", "tilt"])
    p = new_dataset(data)
    p.filter.manual[4] = False
    c = new_dataset(p)
    c.filter.manual[5] = False
    c.apply_filter()
    p.config["filtering"]["tilt min"] = 0
    p.config["filtering"]["tilt max"] = .5
    p.apply_filter()
    assert np.sum(p.filter.all) == 21
    # size of child is directly determined from parent
    assert len(c) == 21
    # filters have not yet been updated
    assert len(c.filter.all) == 41
    assert c.filter.parent_changed
    # the initially excluded event
    assert c.filter.retrieve_manual_indices(c) == [6]

    # try to change the excluded events
    try:
        c.filter.apply_manual_indices(c, [1, 2])
    except fmt_hierarchy.HierarchyFilterError:
        pass
    else:
        assert False, "expected HierarchyFilterError"

    # this can be resolved by applying the filter
    c.apply_filter()
    c.filter.apply_manual_indices(c, [1, 2])
    assert c.filter.retrieve_manual_indices(c) == [1, 2]
def test_hdf5_filtered():
    N = 10
    keys = ["area_um", "image"]
    ddict = example_data_dict(size=N, keys=keys)
    ddict["image"][3] = np.arange(10 * 20, dtype=np.uint8).reshape(10, 20) + 22

    ds1 = dclab.new_dataset(ddict)
    ds1.config["experiment"]["sample"] = "test"
    ds1.config["experiment"]["run index"] = 1
    ds1.filter.manual[2] = False
    ds1.apply_filter()
    fta = ds1.filter.manual.copy()

    edest = tempfile.mkdtemp()
    f1 = join(edest, "dclab_test_export_hdf5_filtered.rtdc")
    ds1.export.hdf5(f1, keys)

    ds2 = dclab.new_dataset(f1)

    assert ds1 != ds2
    assert np.allclose(ds2["area_um"], ds1["area_um"][fta])
    assert np.allclose(ds2["image"][2], ds1["image"][3])
    assert np.all(ds2["image"][2] != ds1["image"][2])

    # cleanup
    shutil.rmtree(edest, ignore_errors=True)
def test_dcor_base(monkeypatch):
    monkeypatch.setattr(dclab.rtdc_dataset.fmt_dcor,
                        "APIHandler",
                        MockAPIHandler)
    with dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip")) as ds:
        dso = dclab.new_dataset("https://example.com/api/3/action/dcserv?id=1")
        assert len(dso) == len(ds)
        assert dso.config["setup"]["channel width"] == \
            ds.config["setup"]["channel width"]
        assert np.all(dso["area_um"] == ds["area_um"])
        assert np.all(dso["area_um"] == ds["area_um"])  # test cache
        assert np.all(dso["image"][4] == ds["image"][4])
        assert len(dso["image"]) == len(ds)
        for key in dso._events:
            assert key in ds
        for m, n in zip(dso["mask"], ds["mask"]):
            assert np.all(m == n)
        # compute an ancillary feature
        assert np.all(dso["volume"] == ds["volume"])
        assert np.all(dso["volume"] == ds["volume"])  # test cache
        # trace
        assert sorted(dso["trace"].keys()) == sorted(ds["trace"].keys())
        assert len(dso["trace"]["fl1_raw"]) == len(ds["trace"]["fl1_raw"])
        assert np.all(dso["trace"]["fl1_raw"][1] == ds["trace"]["fl1_raw"][1])
        for t1, t2 in zip(dso["trace"]["fl1_raw"], ds["trace"]["fl1_raw"]):
            assert np.all(t1 == t2)
def test_load_nonexistent_file_issue81():
    """https://github.com/ZELLMECHANIK-DRESDEN/dclab/issues/81"""
    try:
        dclab.new_dataset("path/does/not/exist.rtdc")
    except FileNotFoundError:
        pass
    else:
        assert False, "Non-existent files should raise FileNotFoundError"
def test_feat_mask():
    path = retrieve_data("fmt-hdf5_mask-contour_2018.zip")
    ds = new_dataset(path)
    ds.filter.manual[0] = False
    ds.filter.manual[2] = False
    ch = new_dataset(ds)
    assert np.all(ch["mask"][0] == ds["mask"][1])
    assert np.all(ch["mask"][1] == ds["mask"][3])
def test_features():
    path = retrieve_data("fmt-hdf5_fl_2017.zip")
    ds = new_dataset(path)
    ch = new_dataset(ds)
    assert ds.features == ch.features
    assert ds.features_innate == ch.features_innate
    assert ds.features_loaded == ch.features_loaded
    assert ds.features_scalar == ch.features_scalar
def test_feat_contour():
    path = retrieve_data("fmt-hdf5_fl_2017.zip")
    ds = new_dataset(path)
    ds.filter.manual[0] = False
    ds.filter.manual[2] = False
    ch = new_dataset(ds)
    assert np.all(ch["contour"][0] == ds["contour"][1])
    assert np.all(ch["contour"][1] == ds["contour"][3])
def test_index_deep_contour():
    data = example_data_dict(42, keys=["area_um", "contour", "deform"])
    ds = new_dataset(data)
    ds.filter.manual[3] = False
    c1 = new_dataset(ds)
    c1.filter.manual[1] = False
    c2 = new_dataset(c1)
    assert np.all(c2["contour"][3] == ds["contour"][5])
def test_feat_trace():
    path = retrieve_data("fmt-hdf5_fl_2017.zip")
    ds = new_dataset(path)
    ds.filter.manual[0] = False
    ds.filter.manual[2] = False
    ch = new_dataset(ds)
    assert np.all(ch["trace"]["fl1_median"][0] == ds["trace"]["fl1_median"][1])
    assert np.all(ch["trace"]["fl1_median"][1] == ds["trace"]["fl1_median"][3])
def test_hdf5_contour_from_hdf5():
    ds1 = new_dataset(retrieve_data("fmt-hdf5_image-bg_2020.zip"))
    assert ds1["contour"].shape == (5, np.nan, 2)

    edest = tempfile.mkdtemp()
    f1 = join(edest, "dclab_test_export_hdf5_image.rtdc")
    ds1.export.hdf5(f1, ["contour"], filtered=False)
    ds2 = dclab.new_dataset(f1)
    assert ds2["contour"].shape == (5, np.nan, 2)
def test_event_count():
    pytest.importorskip("nptdms")
    tdms_path = retrieve_data("fmt-tdms_fl-image_2016.zip")
    ds = new_dataset(tdms_path)
    ds.filter.manual[0] = False
    ch = new_dataset(ds)
    assert ds.config["experiment"]["event count"] == len(ds)
    assert ch.config["experiment"]["event count"] == len(ch)
    assert len(ds) == len(ch) + 1
def test_trace_import_fail():
    # make sure undefined trace data does not raise an error
    tdms_path = retrieve_data("fmt-tdms_fl-image_2016.zip")
    dclab.definitions.FLUOR_TRACES.append("peter")
    dclab.rtdc_dataset.fmt_tdms.naming.tr_data_map["peter"] = [u'ukwn', u'ha']
    new_dataset(tdms_path)
    # clean up
    dclab.rtdc_dataset.fmt_tdms.naming.tr_data_map.pop("peter")
    dclab.definitions.FLUOR_TRACES.pop(-1)
def test_pf_inherited_scalar():
    """Scalar inherited PluginFeatures should be a 1D np.ndarray"""
    info = example_plugin_info_single_feature()
    PlugInFeature("circ_per_area", info)
    with dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip")) as ds:
        ds.filter.manual[2] = False
        ch = dclab.new_dataset(ds)
        assert "circ_per_area" in ch
        assert isinstance(ch["circ_per_area"], np.ndarray)
        assert ch["circ_per_area"].ndim == 1
def test_trace_import_fail():
    # make sure undefined trace data does not raise an error
    tdms_path = retrieve_data(example_data_sets[1])
    dclab.definitions.FLUOR_TRACES.append("peter")
    dclab.rtdc_dataset.fmt_tdms.naming.tr_data_map["peter"] = [u'ukwn', u'ha']
    new_dataset(tdms_path)
    # clean up
    dclab.rtdc_dataset.fmt_tdms.naming.tr_data_map.pop("peter")
    dclab.definitions.FLUOR_TRACES.pop(-1)
    cleanup()
def test_time():
    ds = dclab.new_dataset(retrieve_data("rtdc_data_minimal.zip"))
    tt = ds["time"]
    assert tt[0] == 0
    assert np.allclose(tt[1], 0.0385)
    assert np.all(np.diff(tt) > 0)
    cleanup()
def test_volume():
    ds = dclab.new_dataset(retrieve_data("rtdc_data_minimal.zip"))
    vol = ds["volume"]
    # There are a lot of nans, because the contour is not given everywhere
    vol = vol[~np.isnan(vol)]
    assert np.allclose(vol[0], 574.60368907528346)
    assert np.allclose(vol[12], 1010.5669523203878)
def test_aspect():
    # Aspect ratio of the data
    ds = dclab.new_dataset(retrieve_data("rtdc_data_traces_video_bright.zip"))
    aspect = ds["aspect"]
    assert np.sum(aspect > 1) == 904
    assert np.sum(aspect < 1) == 48
    cleanup()
def test_area_ratio():
    ds = dclab.new_dataset(retrieve_data("rtdc_data_traces_video.zip"))
    comp_ratio = ds["area_ratio"]
    # The convex area is always >= the raw area
    assert np.all(comp_ratio >= 1)
    assert np.allclose(comp_ratio[0], 1.0196464)
    cleanup()
def test_image_basic():
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    # Transition image
    assert np.allclose(ds["image"][0], 0)
    # Real image
    assert np.allclose(np.average(ds["image"][1]), 45.1490478515625)
    cleanup()
def test_compatibility_minimal():
    ds = new_dataset(retrieve_data("rtdc_data_minimal.zip"))
    assert ds.config["setup"]["channel width"] == 20
    assert ds.config["setup"]["chip region"].lower() == "channel"
    assert ds.config["setup"]["flow rate"] == 0.12
    assert ds.config["imaging"]["pixel size"] == 0.34
    cleanup()
def test_fl_crosstalk_3chanvs2chan():
    data = {"fl1_max": np.linspace(1, 1.1, 10),
            "fl2_max": np.linspace(0, 4.1, 10),
            "fl3_max": np.linspace(3, 2.5, 10),
            }
    ds = dclab.new_dataset(data)
    analysis = {"calculation": {"crosstalk fl12": .4,
                                "crosstalk fl21": .05,
                                }}
    ds.config.update(analysis)
    assert "fl2_max_ctc" in ds
    try:
        ds["fl2_max_ctc"]
    except ancillaries.af_fl_max_ctc.MissingCrosstalkMatrixElementsError:
        pass
    else:
        assert False, "Crosstalk correction from missing data should not work"
    # add missing matrix elements
    analysis = {"calculation": {"crosstalk fl13": .1,
                                "crosstalk fl23": .7,
                                "crosstalk fl31": .2,
                                "crosstalk fl32": .2,
                                }}
    ds.config.update(analysis)
    ds["fl1_max_ctc"]
    ds["fl2_max_ctc"]
    ds["fl3_max_ctc"]
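
# Conceptual background (a sketch; dclab's exact index convention may
# differ): crosstalk correction solves a linear spill system, so three
# channels need all six off-diagonal coefficients, which is why the
# partial matrix above raises MissingCrosstalkMatrixElementsError.
def _crosstalk_sketch(measured, c12, c13, c21, c23, c31, c32):
    ct = np.array([[1, c21, c31],
                   [c12, 1, c32],
                   [c13, c23, 1]])
    return np.linalg.solve(ct, measured)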
def test_classify_treatment_repetition_simple():
    measurements = []
    dd = {"area_um": np.linspace(40, 50, 10),
          "deform": np.linspace(.1, .2, 10)}
    for ii in range(5):
        ctl = dclab.new_dataset(dd)
        ctl.title = "donor {} control".format(ii)
        ctl.config["setup"]["chip region"] = "channel"
        trt = dclab.new_dataset(dd)
        trt.title = "donor {}".format(ii)
        trt.config["setup"]["chip region"] = "channel"
        measurements += [ctl, trt]
    ana = Analysis(measurements)
    treatment, repetition = classify_treatment_repetition(ana, id_ctl="control")
    assert treatment == ["Control", "Treatment"] * 5
    assert np.all(repetition == np.repeat(np.arange(5), 2) + 1)
def test_kde_log_scatter():
    ddict = example_data_dict(size=300, keys=["area_um", "deform"])
    ddict["deform"][:20] = .1
    ddict["area_um"][:20] = .5
    ds = dclab.new_dataset(ddict)
    a = ds.get_kde_scatter(yscale="log")
    assert np.all(a[:20] == a[0])
def test_collect_data_tree():
    features = ["area_um", "deform", "time"]
    edest = pathlib.Path(tempfile.mkdtemp(prefix="shapeout_test"))

    for ii in range(1, 4):
        dat = new_dataset(data=example_data_dict(ii + 10, keys=features))
        cfg = {"experiment": {"sample": "test sample",
                              "run index": ii},
               "imaging": {"pixel size": 0.34},
               "setup": {"channel width": 20,
                         "chip region": "channel",
                         "flow rate": 0.04}
               }
        dat.config.update(cfg)
        dat.export.hdf5(path=edest / "{}.rtdc".format(ii),
                        features=features)
    data = meta_tool.collect_data_tree([edest])[0]
    assert len(data) == 1, "only one data folder"
    assert len(data[0]) == 4, "sample name + three measurements"

    # check name
    assert data[0][0][0] == "test sample"

    # check order
    assert data[0][1][1].endswith("1.rtdc")
    assert data[0][2][1].endswith("2.rtdc")
    assert data[0][3][1].endswith("3.rtdc")
    shutil.rmtree(str(edest), ignore_errors=True)
def test_config():
    ds = new_dataset(retrieve_data("rtdc_data_hdf5_contour_image_trace.zip"))
    assert ds.config["setup"]["channel width"] == 30
    assert ds.config["setup"]["chip region"].lower() == "channel"
    assert ds.config["setup"]["flow rate"] == 0.16
    assert ds.config["imaging"]["pixel size"] == 0.34
    cleanup()
def test_trace():
    ds = new_dataset(retrieve_data("rtdc_data_hdf5_contour_image_trace.zip"))
    assert len(ds["trace"]) == 2
    assert ds["trace"]["fl1_raw"].shape == (5, 100)
    assert np.allclose(np.average(
        ds["trace"]["fl1_median"][0]), 0.027744706519425219)
    cleanup()
def test_0basic():
    ds = dclab.new_dataset(retrieve_data(example_data_sets[1]))
    for cc in ['fl1_pos',
               'frame',
               'size_x',
               'size_y',
               'contour',
               'area_cvx',
               'circ',
               'image',
               'trace',
               'fl1_width',
               'nevents',
               'pos_x',
               'pos_y',
               'fl1_area',
               'fl1_max',
               ]:
        assert cc in ds

    # ancillaries
    for cc in ["deform",
               "area_um",
               "aspect",
               "frame",
               "index",
               "time",
               ]:
        assert cc in ds

    cleanup()
def test_invert_copy():
    dclab.PolygonFilter.clear_all_filters()
    ddict = example_data_dict(size=1234, keys=["area_um", "deform"])
    ds = dclab.new_dataset(ddict)
    # points of polygon filter
    points = [[np.min(ddict["area_um"]), np.min(ddict["deform"])],
              [np.min(ddict["area_um"]), np.max(ddict["deform"])],
              [np.average(ddict["area_um"]), np.max(ddict["deform"])],
              [np.average(ddict["area_um"]), np.min(ddict["deform"])],
              ]
    filt1 = dclab.PolygonFilter(axes=["area_um", "deform"],
                                points=points,
                                inverted=False)
    ds.polygon_filter_add(filt1)
    assert [0] == ds.config["filtering"]["polygon filters"]
    n1 = np.sum(ds._filter)
    ds.apply_filter()
    n2 = np.sum(ds._filter)
    assert n1 != n2
    filt2 = filt1.copy(invert=True)
    ds.polygon_filter_add(filt2)
    assert [0, 1] == ds.config["filtering"]["polygon filters"]
    ds.apply_filter()
    assert np.sum(ds._filter) == 0, "inverted+normal filter filters all"
    dclab.PolygonFilter.clear_all_filters()
def test_collect_data_tree_order():
    features = ["area_um", "deform", "time"]
    edest = pathlib.Path(tempfile.mkdtemp(prefix="shapeout_test"))

    for ii in range(1, 13):
        dat = new_dataset(data=example_data_dict(ii + 10, keys=features))
        cfg = {"experiment": {"sample": "test sample",
                              "run index": ii},
               "imaging": {"pixel size": 0.34},
               "setup": {"channel width": 20,
                         "chip region": "channel",
                         "flow rate": 0.04}
               }
        dat.config.update(cfg)
        dat.export.hdf5(path=edest / "M{}_data.rtdc".format(ii),
                        features=features)
    data = meta_tool.collect_data_tree([edest])[0]
    assert pathlib.Path(data[0][1][1]).name == "M1_data.rtdc"
    assert pathlib.Path(data[0][2][1]).name == "M2_data.rtdc"
    assert pathlib.Path(data[0][3][1]).name == "M3_data.rtdc"
    assert pathlib.Path(data[0][4][1]).name == "M4_data.rtdc"
    assert pathlib.Path(data[0][5][1]).name == "M5_data.rtdc"
    assert pathlib.Path(data[0][6][1]).name == "M6_data.rtdc"
    assert pathlib.Path(data[0][7][1]).name == "M7_data.rtdc"
    assert pathlib.Path(data[0][8][1]).name == "M8_data.rtdc"
    assert pathlib.Path(data[0][9][1]).name == "M9_data.rtdc"
    assert pathlib.Path(data[0][10][1]).name == "M10_data.rtdc"
    assert pathlib.Path(data[0][11][1]).name == "M11_data.rtdc"
    assert pathlib.Path(data[0][12][1]).name == "M12_data.rtdc"
    shutil.rmtree(str(edest), ignore_errors=True)
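
# Why this matters (a sketch): plain lexicographic sorting would not yield
# the run-index order asserted above.
def _lexicographic_pitfall_sketch():
    names = ["M2_data.rtdc", "M10_data.rtdc", "M1_data.rtdc"]
    assert sorted(names) == ["M10_data.rtdc", "M1_data.rtdc", "M2_data.rtdc"]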
def test_inert_ratio_raw():
    ds = new_dataset(retrieve_data("rtdc_data_hdf5_mask_contour.zip"))

    raw = ir.get_inert_ratio_raw(cont=ds["contour"])
    ref = np.array([4.25854232,  1.22342663,  4.64971179,  1.70914857,
                    3.62797492, 1.51502192,  2.74757573,  1.79841136])
    assert np.allclose(ref, raw, rtol=0, atol=5e-9)
    cleanup()
def test_trace_methods():
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    for k in list(ds["trace"].keys()):
        assert k in dclab.definitions.FLUOR_TRACES
    for k in ds["trace"]:
        assert k in dclab.definitions.FLUOR_TRACES
    assert ds["trace"].__repr__().count("<loaded into memory>")
    cleanup()
def test_kde_log_scatter_points():
    ddict = example_data_dict(size=300, keys=["area_um", "deform"])
    ds = dclab.new_dataset(ddict)
    a = ds.get_kde_scatter(yscale="log")
    b = ds.get_kde_scatter(yscale="log",
                           positions=[ds["area_um"], ds["deform"]])

    assert np.all(a == b)
def test_wrong_things():
    ddict = example_data_dict(size=67, keys=["area_um", "deform"])
    ds = new_dataset(ddict)
    # Check unknown variable (a warning will be displayed)
    try:
        ds.apply_filter(force=["on_purpose_unknown"])
    except ValueError:
        pass
def test_trace_basic():
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    msg = "traces should not be loaded into memory before first access"
    assert ds["trace"].__repr__().count("<not loaded into memory>"), msg
    assert len(ds["trace"]) == 2
    assert np.allclose(np.average(
        ds["trace"]["fl1_median"][0]), 287.08999999999997)
    cleanup()
def test_downsample_none():
    keys = ["area_um", "deform", "time", "frame", "fl3_width"]
    ddict = example_data_dict(size=8472, keys=keys)
    ds = dclab.new_dataset(ddict)

    assert np.sum(ds._plot_filter) == 8472
    ds.apply_filter()
    ds.get_downsampled_scatter(downsample=0)
    assert np.sum(ds._plot_filter) == 8472
def test_image_out_of_bounds():
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    try:
        ds["image"][5]
    except IndexError:
        pass
    else:
        raise ValueError("IndexError should have been raised!")
    cleanup()
def test_avi_export():
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    edest = tempfile.mkdtemp()
    f1 = join(edest, "test.avi")
    ds.export.avi(path=f1)
    assert os.stat(
        f1)[6] > 1e4, "Resulting file too small, something went wrong!"
    shutil.rmtree(edest, ignore_errors=True)
    cleanup()
def test_kde_positions():
    ddict = example_data_dict()
    ds = dclab.new_dataset(ddict)

    ds.config["filtering"]["enable filters"] = False
    sc = ds.get_kde_scatter(xax="area_um", yax="deform")
    sc2 = ds.get_kde_scatter(xax="area_um", yax="deform",
                             positions=(ds["area_um"], ds["deform"]))
    assert np.all(sc == sc2)
def test_mask_basic():
    ds = new_dataset(retrieve_data(example_data_sets[1]))
    assert len(ds["mask"]) == 12
    # Test mask computation by averaging brightness and comparing to
    # the ancillary feature "bright_avg".
    bavg1 = ds["bright_avg"][1]
    bavg2 = np.mean(ds["image"][1][ds["mask"][1]])
    assert np.allclose(bavg1, bavg2), "mask is correctly computed from contour"
    cleanup()
def test_fl_crosstalk_missing():
    data = {"fl1_max": np.linspace(1, 1.1, 10),
            "fl2_max": np.linspace(0, 4.1, 10),
            }
    ds = dclab.new_dataset(data)
    analysis = {"calculation": {"crosstalk fl12": .4,
                                }}
    ds.config.update(analysis)
    assert "fl2_max_ctc" not in ds
def test_kde_log_contour():
    ddict = example_data_dict(size=300, keys=["area_um", "deform"])
    ddict["deform"][:20] = .1
    ddict["area_um"][:20] = .5
    ds = dclab.new_dataset(ddict)
    xm, ym, _ = ds.get_kde_contour(yscale="log")
    dx = np.diff(xm[0])
    dy = np.diff(np.log(ym[:, 0]))
    assert np.allclose(dx, dx[0])
    assert np.allclose(dy, dy[0])
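
# The constancy checks above boil down to: a log y-scale uses a geometric
# grid whose successive log-differences are constant. Quick numpy sketch:
def _log_grid_sketch():
    y = np.logspace(-3, -1, 41)
    dy = np.diff(np.log(y))
    assert np.allclose(dy, dy[0])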
def test_kde_nofilt():
    ddict = example_data_dict()
    ds = dclab.new_dataset(ddict)
    ds.config["filtering"]["enable filters"] = False
    sc = ds.get_kde_scatter()
    cc = ds.get_kde_contour()
    assert sc.shape[0] == 100
    # This will fail if the default contour accuracy is changed
    # in `get_kde_contour`.
    assert cc[0].shape == (43, 41)