Example #1
def test_sa_h5py_17():
    """
    Test that localizations iterator does not skip empty frames (when requested not to).
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    empty = {"x" : numpy.array([]),
             "y" : numpy.array([])}    

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(100, 100, 5, "")
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(empty, 1)
        h5.addLocalizations(peaks, 2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:
        for i, [fnum, locs] in enumerate(h5.localizationsIterator(skip_empty = False)):
            assert(i == fnum)
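
A usage note: the iterator yields (frame number, localizations) pairs. Whether it also visits the two trailing frames that were never written (the movie was declared with 5 frames above) depends on the iterator spanning the full movie; a quick sketch to check, using only calls shown above:

with saH5Py.SAH5Py(h5_name) as h5:
    n_frames = sum(1 for _ in h5.localizationsIterator(skip_empty = False))
    print(n_frames)  # 5 if the iterator spans the whole movie, 3 if only written frames.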
Example #2
def test_merge_2():
    """
    Test file merging, skipping files with no tracks.
    """
    metadata = "<xml><field1><data1>data</data1></field></xml>"
    ref_tracks = {"x" : numpy.random.randint(0,10,10),
                  "y" : numpy.random.randint(0,10,10)}

    # Create HDF5 files to merge.
    h5_names = []
    for i in range(3):
        h5_name = storm_analysis.getPathOutputTest("test_merge_f" + str(i) + ".hdf5")
        h5_names.append(h5_name)

        with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
            h5.addMetadata(metadata)
            h5.setMovieInformation(20,20,1,"")
            h5.setPixelSize(100.0)
            if(i != 1):
                h5.addTracks(ref_tracks)

    # Merge.
    merge_name = storm_analysis.getPathOutputTest("test_merge.hdf5")
    storm_analysis.removeFile(merge_name)
    mergeHDF5.mergeHDF5(h5_names, merge_name)

    # Check merge.
    with saH5Py.SAH5Py(merge_name) as h5:
        assert(metadata == h5.getMetadata())
        for tracks in h5.tracksIterator():
            assert(numpy.allclose(ref_tracks["x"], tracks["x"]))
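
Since only the first and third input files contain tracks, a natural follow-up check is the merged track count. A sketch, assuming mergeHDF5 simply concatenates the tracks of its inputs:

with saH5Py.SAH5Py(merge_name) as h5:
    assert(h5.getNTracks() == 2 * ref_tracks["x"].size)  # 2 of the 3 inputs had tracks.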
Example #3
def test_sa_h5py_9():
    """
    Test setting the track id field.
    """
    peaks = {"x": numpy.zeros(10), "y": numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Add localizations and track id.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.addLocalizations(peaks, 1)
        h5.addTrackID(numpy.ones(10), 1)

    # Check track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(1)
        assert (numpy.allclose(locs["track_id"], numpy.ones(10)))

    # Change track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        h5.addTrackID(numpy.zeros(10), 1)

    # Check track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(1)
        assert (numpy.allclose(locs["track_id"], numpy.zeros(10)))
Example #4
def test_sa_h5py_6():
    """
    Test adding tracks.
    """
    tracks = {"x": numpy.zeros(10), "y": numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.addTracks(tracks)

    # Read tracks.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert (h5.getNTracks() == tracks["x"].size)

    # Write tracks again; re-opening the file and adding tracks replaces the
    # original tracks, while a second addTracks() in the same session appends.
    with saH5Py.SAH5Py(h5_name) as h5:
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Read tracks.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert (h5.getNTracks() == 2 * tracks["x"].size)
Example #5
def test_sa_h5py_7():
    """
    Test tracks iterator.
    """
    tracks = {"x": numpy.zeros(10), "y": numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # No tracks.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        pass

    with saH5Py.SAH5Py(h5_name) as h5:
        for t in h5.tracksIterator():
            assert (False)  # We should not get here.

    # Tracks.
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.addTracks(tracks)

    with saH5Py.SAH5Py(h5_name) as h5:
        for t in h5.tracksIterator():
            assert (numpy.allclose(t["x"], tracks["x"]))

        # Only get one field.
        for t in h5.tracksIterator(["x"]):
            assert (not "y" in t)
Example #6
def test_fiducials_3():
    """
    Basic fiducials test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames=4))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1, reference_frame=2)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:
        expected = numpy.array([-1, -1, 0])
        for fnum, locs in h5.localizationsIterator(fields=["fiducial_id"]):
            assert numpy.allclose(locs["fiducial_id"], expected[fnum:])
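
The FakeReader helper used throughout these examples is not reproduced on this page. A minimal stand-in consistent with how it is called here, assuming addMovieInformation() queries the reader for the movie geometry (every method except getMovieL(), which Example #21 calls directly, is our guess at the interface):

class FakeReader(object):
    """Hypothetical movie-reader stand-in for these tests."""
    def __init__(self, n_frames = 10):
        # A default of 10 frames matches the arithmetic in Example #21.
        self.n_frames = n_frames

    def getMovieL(self):
        return self.n_frames  # Movie length in frames.

    def getMovieX(self):
        return 256            # Movie width in pixels (assumed).

    def getMovieY(self):
        return 256            # Movie height in pixels (assumed).

    def hashID(self):
        return ""             # Movie hash (assumed).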
Example #7
def test_sa_h5py_4():
    """
    Test handling of drift correction.
    """
    peaks = {"x": numpy.zeros(10), "y": numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.addLocalizations(peaks, 1)
        h5.setDriftCorrection(1, dx=1.0, dy=-1.0)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # not corrected.
        locs = h5.getLocalizationsInFrame(1)
        assert (numpy.allclose(peaks["x"], locs["x"]))
        assert (numpy.allclose(peaks["y"], locs["y"]))

        # corrected.
        locs = h5.getLocalizationsInFrame(1, drift_corrected=True)
        assert (numpy.allclose(peaks["x"] + 1.0, locs["x"]))
        assert (numpy.allclose(peaks["y"] - 1.0, locs["y"]))
Example #8
def test_sa_h5py_2():
    """
    Test data round trip.
    """
    peaks = {"x": numpy.zeros(10), "y": numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.addLocalizations(peaks, 1)
        h5.addLocalizations(peaks, 1, channel=1)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check that frame 0 is empty.
        locs = h5.getLocalizationsInFrame(0)
        assert (not bool(locs))

        # Check frame1.
        locs = h5.getLocalizationsInFrame(1)
        assert (numpy.allclose(peaks["x"], locs["x"]))
        assert (numpy.allclose(peaks["y"], locs["y"]))
        assert (numpy.allclose(peaks["x"], locs["c1_x"]))
        assert (numpy.allclose(peaks["y"], locs["c1_y"]))

        # Check getting a specific field.
        locs = h5.getLocalizationsInFrame(1, fields=["x"])
        assert ("x" in locs)
        assert (not "y" in locs)
Example #9
def test_fiducials_1():
    """
    Basic fiducials test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames=3))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:
        for fnum, locs in h5.localizationsIterator(fields=["fiducial_id"]):
            assert (numpy.allclose(locs["fiducial_id"], numpy.arange(3)))
Example #10
def test_fiducials_7():
    """
    Iterator test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames=4))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1, reference_frame=2)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        for fdcl in h5.fiducialsIterator():
            assert (numpy.allclose(fdcl["frame"], numpy.arange(3)))
Example #11
def test_fiducials_8():
    """
    Gap test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in [0, 1, 3]:
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames=4))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1, max_gap=1)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        expected = numpy.array([0, 1, 3])
        for fdcl in h5.fiducialsIterator():
            assert (numpy.allclose(fdcl["frame"], expected))
Example #12
def test_cl_sa_h5py_2():
    """
    Test basic cluster file mechanics (using tracks).
    """
    tracks = {
        "x": numpy.arange(11, dtype=numpy.float),
        "y": numpy.arange(11, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write track data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.addTracks(tracks)

    # Write clustering data for tracks.
    cluster_id = numpy.remainder(numpy.arange(11), 3)
    cluster_data = {
        "track_id": numpy.zeros(11, dtype=numpy.int),
        "loc_id": numpy.arange(11)
    }

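    # Iterator index 0 is the unclustered set (empty here); cluster ids
    # 0, 1 and 2 from numpy.remainder() occur 4, 4 and 3 times.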
    cl_size = [0, 4, 4, 3]
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        assert (cl_h5.getNClusters() == (len(cl_size) - 1))
        for index, cluster in cl_h5.clustersIterator(skip_unclustered=False):
            for field in cluster:
                assert (cluster[field].size == cl_size[index])
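
A sketch of dumping cluster sizes with the same iterator; both keyword arguments also appear in Example #55:

with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
    for index, cluster in cl_h5.clustersIterator(skip_unclustered = True, fields = ["loc_id"]):
        print(index, cluster["loc_id"].size)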
Example #13
def test_fiducials_4():
    """
    Test no localizations in reference frame.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            h5.addLocalizations(peaks, i)
        h5.addMovieInformation(FakeReader(n_frames=5))

    # Track fiducials.
    okay = False
    try:
        fiducials.trackFiducials(h5_name, radius=0.1, reference_frame=3)
    except fiducials.FiducialException:
        okay = True
    assert okay
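
When pytest is the runner, the okay-flag pattern above can be written more compactly (an equivalent sketch, after the same file setup):

import pytest

with pytest.raises(fiducials.FiducialException):
    fiducials.trackFiducials(h5_name, radius = 0.1, reference_frame = 3)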
Example #14
def test_cl_sa_h5py_6():
    """
    Test getting all of the tracks for clustering.
    """
    tracks = {
        "category": numpy.arange(4, dtype=numpy.int32),
        "x": numpy.arange(4, dtype=numpy.float),
        "y": numpy.arange(4, dtype=numpy.float),
        "z": numpy.arange(4, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Test getting all the tracking data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert (numpy.allclose(x, cl_dict['loc_id']))
        assert (numpy.allclose(y, cl_dict['loc_id']))
        assert (numpy.allclose(z, cl_dict['loc_id']))
        assert (numpy.allclose(c, cl_dict['loc_id']))
        assert (numpy.allclose(cl_dict['track_id'],
                               numpy.array([0, 0, 0, 0, 1, 1, 1, 1])))
Example #15
def test_cl_sa_h5py_5():
    """
    Test getting all of the localizations for clustering.
    """
    locs = {
        "category": numpy.arange(4, dtype=numpy.int32),
        "x": numpy.arange(4, dtype=numpy.float),
        "y": numpy.arange(4, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 5, "")
        h5.setPixelSize(100.0)
        h5.addLocalizations(locs, 1)
        h5.addLocalizations(locs, 3)

    # Test getting all the localization data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert (numpy.allclose(x, cl_dict['loc_id']))
        assert (numpy.allclose(y, cl_dict['loc_id']))
        assert (numpy.allclose(z, numpy.zeros(x.size)))
        assert (numpy.allclose(c, cl_dict['loc_id']))
        assert (numpy.allclose(cl_dict['frame'],
                               numpy.array([1, 1, 1, 1, 3, 3, 3, 3])))
Example #16
def test_cl_sa_h5py_4():
    """
    Test cluster info string round trip.
    """
    locs = {
        "x": numpy.arange(10, dtype=numpy.float),
        "y": numpy.arange(10, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.addLocalizations(locs, 1)

    # Write clustering data for localizations.
    cluster_id = numpy.remainder(numpy.arange(10), 3)
    cluster_data = {
        "frame": numpy.ones(10, dtype=numpy.int),
        "loc_id": numpy.arange(10)
    }

    info_string = "dbscan,eps,10.0,mc,5"
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        cl_h5.setClusteringInfo(info_string)
        assert (cl_h5.getClusteringInfo() == info_string)
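
A sketch extending the round trip across a close and reopen, to confirm the info string is persisted in the file rather than cached on the object (an assumption the test above leaves implicit):

with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
    assert(cl_h5.getClusteringInfo() == info_string)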
Example #17
def test_sa_h5py_10():
    """
    Test 'is_existing' and 'overwrite' parameters.
    """
    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Test failure on trying to open a file that does not exist.
    try:
        with saH5Py.SAH5Py(h5_name) as h5:
            pass
    except saH5Py.SAH5PyException:
        pass
    else:
        assert (False)

    # Test failure on trying to overwrite a file that does exist.

    # Create the file.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        pass

    # Test that we cannot overwrite it.
    try:
        with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
            pass
    except saH5Py.SAH5PyException:
        pass
    else:
        assert (False)

    # Test that we can overwrite it.
    with saH5Py.SAH5Py(h5_name, is_existing=False, overwrite=True) as h5:
        pass
Example #18
def test_fiducials_9():
    """
    Test fiducial averaging.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames=3))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        [ave, n] = h5.averageFiducials(fields=["y"])
        assert (numpy.allclose(ave["y"], numpy.ones(3)))
Example #19
def test_sa_h5py_12():
    """
    Test handling of multiple channels.
    """
    peaks = {"x": numpy.zeros(3), "y": numpy.ones(3)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.addLocalizations(peaks, 1)

        peaks["x"] += 1
        peaks["y"] += 1
        h5.addLocalizations(peaks, 1, channel=1)

        peaks["x"] += 1
        peaks["y"] += 1
        h5.addLocalizations(peaks, 1, channel=2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check getting number of channels.
        assert (h5.getNChannels() == 3)

        for [fnum, locs] in h5.localizationsIterator():
            for i, elt in enumerate(h5.splitByChannel(locs)):
                assert (numpy.allclose(elt["x"], i * numpy.ones(3)))
                assert (numpy.allclose(elt["y"], i * numpy.ones(3) + 1.0))
Example #20
def test_sa_h5py_2():
    """
    Test data round trip.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addLocalizations(peaks, 1)
        h5.addLocalizations(peaks, 1, channel = 1)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check that frame 0 is empty.
        locs = h5.getLocalizationsInFrame(0)
        assert(not bool(locs))

        # Check frame1.
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(peaks["x"], locs["x"]))
        assert(numpy.allclose(peaks["y"], locs["y"]))
        assert(numpy.allclose(peaks["x"], locs["c1_x"]))
        assert(numpy.allclose(peaks["y"], locs["c1_y"]))

        # Check getting a specific field.
        locs = h5.getLocalizationsInFrame(1, fields = ["x"])
        assert("x" in locs)
        assert(not "y" in locs)
Example #21
def test_sa_h5py_3():
    """
    Test getting data from multiple frames.
    """
    peaks = {"x": numpy.zeros(10), "y": numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    fr = FakeReader()
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.addMovieInformation(fr)
        for i in range(fr.getMovieL()):
            h5.addLocalizations(peaks, i)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check localizations in first 5 frames.
        locs = h5.getLocalizationsInFrameRange(0, 5)
        assert (locs["x"].size == 50)

        # Get all the localizations.
        locs = h5.getLocalizations()
        assert (locs["x"].size == (10.0 * fr.getMovieL()))
Example #22
def test_sa_h5py_12():
    """
    Test handling of multiple channels.
    """
    peaks = {"x" : numpy.zeros(3),
             "y" : numpy.ones(3)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addLocalizations(peaks, 1)

        peaks["x"] += 1
        peaks["y"] += 1
        h5.addLocalizations(peaks, 1, channel = 1)

        peaks["x"] += 1
        peaks["y"] += 1        
        h5.addLocalizations(peaks, 1, channel = 2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check getting number of channels.
        assert(h5.getNChannels() == 3)

        for [fnum, locs] in h5.localizationsIterator():
            for i, elt in enumerate(h5.splitByChannel(locs)):
                assert(numpy.allclose(elt["x"], i * numpy.ones(3)))
                assert(numpy.allclose(elt["y"], i * numpy.ones(3) + 1.0))
Example #23
def test_sa_h5py_16():
    """
    Test that localizations iterator skips empty frames.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    empty = {"x" : numpy.array([]),
             "y" : numpy.array([])}    

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(100, 100, 5, "")
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(empty, 1)
        h5.addLocalizations(peaks, 2)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:
        for fnum, locs in h5.localizationsIterator():
            assert(fnum != 1)
Example #24
def test_sa_h5py_10():
    """
    Test 'is_existing' and 'overwrite' parameters.
    """
    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Test failure on trying to open a file that does not exist.
    try:
        with saH5Py.SAH5Py(h5_name) as h5:
            pass
    except saH5Py.SAH5PyException:
        pass
    else:
        assert(False)

    # Test failure on trying to overwrite a file that does exist.

    # Create the file.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        pass

    # Test that we cannot overwrite it.
    try:
        with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
            pass
    except saH5Py.SAH5PyException:
        pass
    else:
        assert(False)

    # Test that we can overwrite it.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        pass
Example #25
def test_sa_h5py_11():
    """
    Test hasLocalizationsField() and hasTracksField().
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.addLocalizations(peaks, 1)
        h5.addTracks(peaks)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:

        assert(h5.hasLocalizationsField("x"))
        assert(not h5.hasLocalizationsField("x1"))

        assert(h5.hasTracksField("x"))
        assert(not h5.hasTracksField("x1"))
Example #26
def test_sa_h5py_9():
    """
    Test setting the track id field.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Add localizations and track id.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addLocalizations(peaks, 1)
        h5.addTrackID(numpy.ones(10), 1)

    # Check track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(locs["track_id"], numpy.ones(10)))
        
    # Change track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        h5.addTrackID(numpy.zeros(10), 1)

    # Check track id.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(locs["track_id"], numpy.zeros(10)))
Example #27
def test_sa_h5py_7():
    """
    Test tracks iterator.
    """
    tracks = {"x" : numpy.zeros(10),
              "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # No tracks.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        pass

    with saH5Py.SAH5Py(h5_name) as h5:
        for t in h5.tracksIterator():
            assert(False) # We should not get here.

    # Tracks.
    storm_analysis.removeFile(h5_name)
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addTracks(tracks)

    with saH5Py.SAH5Py(h5_name) as h5:
        for t in h5.tracksIterator():
            assert(numpy.allclose(t["x"], tracks["x"]))

        # Only get one field.
        for t in h5.tracksIterator(["x"]):
            assert(not "y" in t)
Example #28
def test_sa_h5py_6():
    """
    Test adding tracks.
    """
    tracks = {"x" : numpy.zeros(10),
              "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addTracks(tracks)

    # Read tracks.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == tracks["x"].size)

    # Write tracks again; re-opening the file and adding tracks replaces the
    # original tracks, while a second addTracks() in the same session appends.
    with saH5Py.SAH5Py(h5_name) as h5:
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Read tracks.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 2*tracks["x"].size)
Example #29
def test_cl_sa_h5py_2():
    """
    Test basic cluster file mechanics (using tracks).
    """
    tracks = {"x" : numpy.arange(11, dtype = numpy.float),
              "y" : numpy.arange(11, dtype = numpy.float)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write track data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addTracks(tracks)

    # Write clustering data for tracks.
    cluster_id = numpy.remainder(numpy.arange(11), 3)
    cluster_data = {"track_id" : numpy.zeros(11, dtype = numpy.int),
                    "loc_id" : numpy.arange(11)}

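    # Iterator index 0 is the unclustered set (empty here); cluster ids
    # 0, 1 and 2 from numpy.remainder() occur 4, 4 and 3 times.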
    cl_size = [0, 4, 4, 3]
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        assert(cl_h5.getNClusters() == (len(cl_size) - 1))
        for index, cluster in cl_h5.clustersIterator(skip_unclustered = False):
            for field in cluster:
                assert(cluster[field].size == cl_size[index])
Example #30
def test_sa_h5py_3():
    """
    Test getting data from multiple frames.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    fr = FakeReader()
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addMovieInformation(fr)
        for i in range(fr.getMovieL()):
            h5.addLocalizations(peaks, i)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check localizations in first 5 frames.
        locs = h5.getLocalizationsInFrameRange(0,5)
        assert(locs["x"].size == 50)

        # Get all the localizations.
        locs = h5.getLocalizations()
        assert(locs["x"].size == (10.0 * fr.getMovieL()))
Example #31
def test_cl_sa_h5py_5():
    """
    Test getting all of the localizations for clustering.
    """
    locs = {"category" : numpy.arange(4, dtype = numpy.int32),
            "x" : numpy.arange(4, dtype = numpy.float),
            "y" : numpy.arange(4, dtype = numpy.float)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,5,"")
        h5.setPixelSize(100.0)
        h5.addLocalizations(locs, 1)
        h5.addLocalizations(locs, 3)

    # Test getting all the localization data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert(numpy.allclose(x, cl_dict['loc_id']))
        assert(numpy.allclose(y, cl_dict['loc_id']))
        assert(numpy.allclose(z, numpy.zeros(x.size)))
        assert(numpy.allclose(c, cl_dict['loc_id']))
        assert(numpy.allclose(cl_dict['frame'], numpy.array([1,1,1,1,3,3,3,3])))
Example #32
def test_fiducials_8():
    """
    Gap test.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in [0,1,3]:
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames = 4))
        
    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius = 0.1, max_gap = 1)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        expected = numpy.array([0,1,3])
        for fdcl in h5.fiducialsIterator():
            assert(numpy.allclose(fdcl["frame"], expected))
Example #33
def test_fiducials_1():
    """
    Basic fiducials test.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in range(3):
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames = 3))
        
    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius = 0.1)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:
        for fnum, locs in h5.localizationsIterator(fields = ["fiducial_id"]):
            assert(numpy.allclose(locs["fiducial_id"], numpy.arange(3)))
Example #34
def test_fiducials_3():
    """
    Basic fiducials test.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames = 4))
        
    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius = 0.1, reference_frame = 2)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:
        expected = numpy.array([-1,-1,0])
        for fnum, locs in h5.localizationsIterator(fields = ["fiducial_id"]):
            assert numpy.allclose(locs["fiducial_id"], expected[fnum:])
Example #35
def test_sa_h5py_4():
    """
    Test handling of drift correction.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addLocalizations(peaks, 1)
        h5.setDriftCorrection(1, dx = 1.0, dy = -1.0)

    # Read data.
    with saH5Py.SAH5Py(h5_name) as h5:

        # not corrected.
        locs = h5.getLocalizationsInFrame(1)
        assert(numpy.allclose(peaks["x"], locs["x"]))
        assert(numpy.allclose(peaks["y"], locs["y"]))

        # corrected.
        locs = h5.getLocalizationsInFrame(1, drift_corrected = True)
        assert(numpy.allclose(peaks["x"] + 1.0, locs["x"]))
        assert(numpy.allclose(peaks["y"] - 1.0, locs["y"]))
Example #36
def test_fiducials_9():
    """
    Test fiducial averaging.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in range(3):
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames = 3))
        
    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius = 0.1)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        [ave, n] = h5.averageFiducials(fields = ["y"])
        assert(numpy.allclose(ave["y"], numpy.ones(3)))
Example #37
def test_hdf5_to_bin_2():
    """
    Test tracks conversion.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addMetadata("<settings/>")
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(100.0)
        h5.addTracks(peaks)

    # Convert.
    i3_name = storm_analysis.getPathOutputTest("test_mlist.bin")
    storm_analysis.removeFile(i3_name)
    hdf5ToBin.hdf5ToBin(h5_name, i3_name)

    # Load Insight3 file and check values.
    i3_data = readinsight3.loadI3File(i3_name, verbose = False)

    assert(numpy.allclose(peaks["x"], i3_data['x'] - 1.0))
    assert(numpy.allclose(peaks["y"], i3_data['y'] - 1.0))    
    assert(numpy.allclose(i3_data['fr'], numpy.ones(10)))
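
The +/- 1.0 offsets in the x/y checks encode the coordinate convention difference: Insight3 .bin files index pixels from 1, while the HDF5 localizations index from 0. Mapping back is then simply (a sketch under that assumption):

x_h5 = i3_data['x'] - 1.0
y_h5 = i3_data['y'] - 1.0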
Example #38
def test_fiducials_7():
    """
    Iterator test.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames = 4))
        
    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius = 0.1, reference_frame = 2)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        for fdcl in h5.fiducialsIterator():
            assert(numpy.allclose(fdcl["frame"], numpy.arange(3)))
Example #39
def test_tracker_5():
    """
    Test that each object is assigned to the nearest track.
    """
    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
                 "y" : numpy.array([1.0, 1.0, 1.0]),
                 "sum" : numpy.array([4.0, 4.0, 4.0])}
        h5.addLocalizations(peaks, 0)
        
        peaks = {"x" : numpy.array([2.0]),
                 "y" : numpy.array([1.0]),
                 "sum" : numpy.array([4.0])}
        h5.addLocalizations(peaks, 1)

        h5.addMovieInformation(FakeReader(n_frames = 2))

    # Track.
    tracker.tracker(h5_name, radius = 1.1)

    # Check the tracking: the frame-1 localization at x = 2.0 extends the
    # nearest track (x = 2.0) to length 2; the x = 1.0 and x = 3.0 tracks
    # keep length 1.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 3)
        for t in h5.tracksIterator():
            assert(numpy.allclose(numpy.array([1,3,2]), t["x"]))
            assert(numpy.allclose(numpy.array([0,2,1]), t["track_id"]))
            assert(numpy.allclose(numpy.array([1,1,2]), t["track_length"]))
Example #40
def test_cl_sa_h5py_6():
    """
    Test getting all of the tracks for clustering.
    """
    tracks = {"category" : numpy.arange(4, dtype = numpy.int32),
              "x" : numpy.arange(4, dtype = numpy.float),
              "y" : numpy.arange(4, dtype = numpy.float),
              "z" : numpy.arange(4, dtype = numpy.float)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Test getting all the tracking data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert(numpy.allclose(x, cl_dict['loc_id']))
        assert(numpy.allclose(y, cl_dict['loc_id']))
        assert(numpy.allclose(z, cl_dict['loc_id']))
        assert(numpy.allclose(c, cl_dict['loc_id']))
        assert(numpy.allclose(cl_dict['track_id'], numpy.array([0,0,0,0,1,1,1,1])))
Example #41
def test_tracker_6():
    """
    Test max_gap parameter.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0]),
             "sum" : numpy.array([4.0, 4.0, 4.0])}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(peaks, 2)
        h5.addMovieInformation(FakeReader(n_frames = 3))

    # Track.
    tracker.tracker(h5_name, radius = 0.1)

    # Check the tracking.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 6)
        for t in h5.tracksIterator():
            assert(numpy.allclose(t["track_length"], numpy.ones(6)))

    # Redo the tracking allowing single frame gaps.
    tracker.tracker(h5_name, max_gap = 1, radius = 0.1)

    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 3)
        for t in h5.tracksIterator():
            assert(numpy.allclose(t["track_length"], 2.0*numpy.ones(3)))
Example #42
def test_tracker_8():
    """
    Test tracking over an empty frame.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0]),
             "sum" : numpy.array([4.0, 4.0, 4.0])}

    empty = {"x" : numpy.array([]),
             "y" : numpy.array([]),
             "sum" : numpy.array([])} 

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(empty, 1)
        h5.addLocalizations(peaks, 2)
        h5.addMovieInformation(FakeReader(n_frames = 3))

    # Track.
    tracker.tracker(h5_name, descriptor = "111", radius = 0.1)

    # Check the tracking.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 6)
        for t in h5.tracksIterator():
            assert(numpy.allclose(numpy.ones(6), t["track_length"]))
Example #43
def test_cl_sa_h5py_4():
    """
    Test cluster info string round trip.
    """
    locs = {"x" : numpy.arange(10, dtype = numpy.float),
            "y" : numpy.arange(10, dtype = numpy.float)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.addLocalizations(locs, 1)

    # Write clustering data for localizations.
    cluster_id = numpy.remainder(numpy.arange(10), 3)
    cluster_data = {"frame" : numpy.ones(10, dtype = numpy.int),
                    "loc_id" : numpy.arange(10)}

    info_string = "dbscan,eps,10.0,mc,5"
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        cl_h5.setClusteringInfo(info_string)
        assert (cl_h5.getClusteringInfo() == info_string)
Example #44
def test_sa_h5py_19():
    """
    Test getting specific fields.
    """
    peaks = {"bar" : numpy.zeros(10),
             "x" : numpy.zeros(10),
             "y" : numpy.zeros(10)}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)
    
    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(100, 100, 1, "")
        h5.addLocalizations(peaks, 0)

    # Get data.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrame(0)
        for elt in ["bar", "x", "y"]:
            assert elt in locs

        locs = h5.getLocalizationsInFrame(0, fields = ["x"])
        assert "x" in locs
        for elt in ["bar", "y"]:
            assert not elt in locs
Example #45
def _test_psf_to_spline_2D():

    psf = storm_analysis.getPathOutputTest("test_spliner_psf_2d.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf_2d.spline")
    storm_analysis.removeFile(spline)

    from storm_analysis.spliner.psf_to_spline import psfToSpline
    psfToSpline(psf, spline, 7)
Example #46
def test_setup_A_matrix():

    # Test setupAMatrix.
    a_matrix_file = storm_analysis.getPathOutputTest("test_l1h")
    storm_analysis.removeFile(a_matrix_file)

    from storm_analysis.L1H.setup_A_matrix import setupAMatrix
    setupAMatrix("theoritical", a_matrix_file, 1.0, False)
Example #48
def test_measure_psf():

    movie = storm_analysis.getData("test/data/test_spliner.dax")
    mlist = storm_analysis.getData("test/data/test_spliner_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf.psf")
    storm_analysis.removeFile(psf)

    from storm_analysis.spliner.measure_psf import measurePSF
    measurePSF(movie, "", mlist, psf)
Example #49
def test_3ddao_2d_fixed_non_square():

    movie_name = storm_analysis.getData("test/data/test_300x200.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_300x200.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)
Example #50
def test_homotopy_psf():

    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_olist.bin")
    psf = storm_analysis.getPathOutputTest("l1h_psf.psf")
    storm_analysis.removeFile(psf)

    from storm_analysis.L1H.homotopy_psf import homotopyPSF
    homotopyPSF(movie, mlist, psf)
Example #51
def test_3ddao_Z():

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_Z.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)
Example #52
def test_measure_psf_2D():

    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf_2d.psf")
    storm_analysis.removeFile(psf)

    from storm_analysis.spliner.measure_psf import measurePSF
    measurePSF(movie, "", mlist, psf, want2d=True, aoi_size=5)
Example #54
def create2DSpline():
    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf_2d.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf_2d.spline")

    storm_analysis.removeFile(psf)
    storm_analysis.removeFile(spline)

    measurePSF.measurePSF(movie, "", mlist, psf, want2d=True, aoi_size=5)
    psfToSpline.psfToSpline(psf, spline, 4)
Example #55
def test_voronoi_clustering_1():
    numpy.random.seed(1)

    filename = "test_clustering_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    category = numpy.zeros(10, dtype=numpy.int32)
    x = 10.0 * numpy.arange(10)
    y = 10.0 * numpy.arange(10)
    z = numpy.zeros(10)

    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.setPixelSize(1.0)

        for i in range(100):
            tracks = {
                "category": category,
                "x": x + numpy.random.normal(scale=0.1, size=10),
                "y": y + numpy.random.normal(scale=0.1, size=10),
                "z": z + numpy.random.normal(scale=0.1, size=10)
            }

            h5.addTracks(tracks)

    # Cluster data with voronoi.
    voronoiAnalysis.findClusters(h5_name, 0.1, 10, verbose=False)

    # Check clustering results.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        assert (cl_h5.getNClusters() == 10)
        for index, cluster in cl_h5.clustersIterator(skip_unclustered=True,
                                                     fields=["x", "y", "z"]):
            for elt in ['x', 'y', 'z']:
                dev = numpy.std(cluster[elt])
                assert (dev > 0.07)
                assert (dev < 0.12)

    # Calculate common cluster statistics.
    stats_name = dbscanAnalysis.clusterStats(h5_name, 50, verbose=False)

    # Check statistics.
    stats = numpy.loadtxt(stats_name, skiprows=1)
    index = numpy.argsort(stats[:, 3])
    assert (stats.shape[0] == 10)
    assert (numpy.allclose(stats[:, 0], numpy.arange(10) + 1))
    assert (numpy.allclose(stats[:, 1], numpy.zeros(10)))
    assert (numpy.count_nonzero(
        numpy.greater(stats[:, 2], 80.0 * numpy.ones(10))) == 10)
    assert (numpy.allclose(stats[index, 3], x, rtol=0.2, atol=2.0))
    assert (numpy.allclose(stats[index, 4], y, rtol=0.2, atol=2.0))
    assert (numpy.allclose(stats[index, 5], z, rtol=0.2, atol=20.0))
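
The assertions pin down the layout of the stats file: column 0 is the cluster id, column 1 the category, column 2 the cluster size, and columns 3-5 the cluster center (x, y, z). A sketch that reads it back under that interpretation:

stats = numpy.loadtxt(stats_name, skiprows = 1)
cl_ids = stats[:, 0].astype(int)   # Cluster ids, 1..N.
sizes = stats[:, 2]                # Localizations per cluster.
centers = stats[:, 3:6]            # Cluster centers (x, y, z).
print(cl_ids, sizes, centers)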
Example #56
def create3DSpline():

    movie = storm_analysis.getData("test/data/test_spliner.dax")
    mlist = storm_analysis.getData("test/data/test_spliner_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf.spline")

    storm_analysis.removeFile(psf)
    storm_analysis.removeFile(spline)

    measurePSF.measurePSF(movie, "", mlist, psf, aoi_size=6)
    psfToSpline.psfToSpline(psf, spline, 5)
Example #57
def test_align_merge_2():
    """
    Test aligning and merging two HDF5 files with offset.
    """
    n_locs = 500
    tracks = {"x" : numpy.random.normal(loc = 10.0, scale = 0.2, size = n_locs),
              "y" : numpy.random.normal(loc = 10.0, scale = 0.2, size = n_locs),
              "z" : numpy.random.normal(scale = 0.05, size = n_locs)}

    h5_in1 = storm_analysis.getPathOutputTest("test_align_merge_1.hdf5")
    h5_in2 = storm_analysis.getPathOutputTest("test_align_merge_2.hdf5")
    h5_alm = storm_analysis.getPathOutputTest("test_align_merge_3.hdf5")

    # Create input files.
    t_dx = 2.0
    t_dz = 0.3
    with saH5Py.SAH5Py(h5_in1, is_existing = False, overwrite = True) as h5:
        h5.addMetadata("<xml><field1><data1>1</data1></field></xml>")
        h5.setMovieInformation(20, 20, 2, "")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)

    with saH5Py.SAH5Py(h5_in2, is_existing = False, overwrite = True) as h5:
        h5.addMetadata("<xml><field1><data1>2</data1></field></xml>")
        h5.setMovieInformation(20, 20, 2, "")
        h5.setPixelSize(100.0)

        tracks["x"] += t_dx
        tracks["z"] += t_dz
        h5.addTracks(tracks)

    # Align and merge with offset.
    storm_analysis.removeFile(h5_alm)
    [dx, dy, dz] = alignAndMerge.alignAndMerge(h5_in1, h5_in2, h5_alm, dx = -t_dx)

    # Check that we got the right offsets.
    assert(numpy.allclose(numpy.array([dx, dy, dz]),
                          numpy.array([-t_dx, 0.0, -t_dz]),
                          atol = 0.001,
                          rtol = 0.1))


    # Check that the output file is correctly aligned.
    with saH5Py.SAH5Py(h5_alm) as h5:
        tracks = h5.getTracks(fields = ["x", "y", "z"])
        assert(numpy.allclose(numpy.array([numpy.std(tracks["x"]),
                                           numpy.std(tracks["y"]),
                                           numpy.std(tracks["z"])]),
                              numpy.array([0.2, 0.2, 0.05]),
                              atol = 0.001,
                              rtol = 0.1))
Example #58
def test_scmos_Z():

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_sc_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_sc_Z.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.sCMOS.scmos_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1958):
        raise Exception(
            "sCMOS Z did not find the expected number of localizations.")
Example #59
def test_3ddao_Z():

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_Z.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1955):
        raise Exception(
            "3D-DAOSTORM Z did not find the expected number of localizations.")
Example #60
def test_3ddao_2d_fixed_non_square():

    movie_name = storm_analysis.getData("test/data/test_300x200.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_300x200.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 991):
        raise Exception(
            "3D-DAOSTORM 2D fixed non square did not find the expected number of localizations."
        )
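
verifyNumberLocalizations() and verifyIsCloseEnough() come from the verification module imported as veri; the latter presumably applies a relative tolerance to the expected count. A hypothetical stand-in:

def verifyIsCloseEnough(number1, number2, margin = 0.05):
    # Hypothetical sketch: accept counts within 5% of the reference value.
    return abs(number1 - number2) <= (number2 * margin)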