Example #1
def collateRQE(dirs, settings):
    """
    Results collation for RQE correction.
    """
    for a_dir in dirs:
        print("Processing", a_dir)

        t_locs = saH5Py.loadLocalizations("grid_list.hdf5", fields=["x", "y"])

        t_locs_found = numpy.zeros_like(t_locs["x"])

        n_frames = 0
        with saH5Py.SAH5Py(os.path.join(a_dir, "test.hdf5")) as h5:
            for i in range(h5.getMovieLength()):
                n_frames += 1

                m_locs = h5.getLocalizationsInFrame(i, fields=["x", "y"])
                dist = iaUtilsC.peakToPeakDistAndIndex(t_locs['x'],
                                                       t_locs['y'],
                                                       m_locs['x'],
                                                       m_locs['y'],
                                                       max_distance=3)[0]

                for j in range(dist.size):
                    if (dist[j] > -0.1):
                        t_locs_found[j] += 1

        # Check results against the binomial distribution.
        p = numpy.sum(t_locs_found) / (n_frames * t_locs["x"].size)
        print("  Mean P found     : {0:.3f}".format(p))
        print("  Expected variance: {0:.3f}".format(n_frames * p * (1 - p)))
        print("  Actual variance  : {0:.3f}".format(numpy.var(t_locs_found)))
        print()
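
A minimal standalone sketch of the same binomial check on simulated counts (the numbers below are illustrative, not from any real analysis):

import numpy

n_frames = 100   # number of frames processed
n_sites = 50     # number of test localization sites
p_true = 0.8     # assumed per-frame detection probability

# Simulate how many frames each site was found in; each count is Binomial(n_frames, p_true).
found = numpy.random.binomial(n_frames, p_true, size=n_sites)

# The same statistics that collateRQE() prints.
p = numpy.sum(found) / (n_frames * n_sites)
print("Mean P found     : {0:.3f}".format(p))
print("Expected variance: {0:.3f}".format(n_frames * p * (1 - p)))
print("Actual variance  : {0:.3f}".format(numpy.var(found)))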
Example #2
def fitzTracks(h5_name, cutoff, wx_params, wy_params, z_min, z_max, z_step):
    """
    This processes the tracked localizations.

    Note: Localizations whose wx/wy values are too far from the calibration
          curve will be given a z value that is less than z_min. Assigning
          such localizations to category 9 is done in a separate step (see
          zCheck()).
    """
    zfit_data = c_fitz.initialize(numpy.ascontiguousarray(wx_params),
                                  numpy.ascontiguousarray(wy_params),
                                  z_min * 1000.0, z_max * 1000.0,
                                  z_step * 1000.0, cutoff)

    # Fit tracked localizations & save z value (in microns).
    with saH5Py.SAH5Py(h5_name) as h5:
        pixel_size = h5.getPixelSize()
        for index, locs in enumerate(h5.tracksIterator()):
            z_vals = numpy.zeros(locs["xsigma"].size, dtype=numpy.float64)
            for i in range(locs["xsigma"].size):
                wx = pixel_size * 2.0 * locs["xsigma"][i] / locs[
                    "track_length"][i]
                wy = pixel_size * 2.0 * locs["ysigma"][i] / locs[
                    "track_length"][i]
                z_vals[i] = c_fitz.findBestZ(zfit_data, wx, wy) * 1.0e-3
            h5.addTrackData(z_vals, index, "z")

    c_fitz.cleanup(zfit_data)
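
A usage sketch for fitzTracks(); the file names are placeholders and the import path assumes the usual storm_analysis parameters module:

import storm_analysis.sa_library.parameters as params

# Placeholder names; substitute real analysis settings and tracked output.
settings_xml = "analysis_settings.xml"
tracked_hdf5 = "tracked.hdf5"

parameters = params.ParametersDAO().initFromFile(settings_xml)
[wx_params, wy_params] = parameters.getWidthParams()
[min_z, max_z] = parameters.getZRange()

# Fit z for every track, with a 1nm z step (in microns) and a cutoff of 1.5,
# matching the values used in test_fitz_c_4() further below.
fitzTracks(tracked_hdf5, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)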
Example #3
def loadWxWyZData(h5_name, zfile_name):
    """
    h5_name - The name of the HDF5 localization file.
    zfile_name - The name of the text file containing z offset data.
    """
    # This file contains two columns, the first is whether or not
    # the data in this frame should be used (0 = No, 1 = Yes) and
    # the second contains the z offset in microns.
    #
    # For movies acquired using storm-control this can be created
    # from the .off file using storm_analysis/spliner/offset_to_z.py.
    #
    z_data = numpy.loadtxt(zfile_name, ndmin=2)

    # Create arrays with wx, wy, z data.
    wx = None
    wy = None
    z = None
    with saH5Py.SAH5Py(h5_name) as h5:
        pixel_size = h5.getPixelSize()
        for curf, locs in h5.localizationsIterator(
                fields=["xsigma", "ysigma"]):
            if (int(z_data[curf, 0]) == 0):
                continue
            if wx is None:
                wx = 2.0 * locs["xsigma"]
                wy = 2.0 * locs["ysigma"]
                z = numpy.ones(wx.size) * z_data[curf, 1]
            else:
                wx = numpy.concatenate((wx, 2.0 * locs["xsigma"]))
                wy = numpy.concatenate((wy, 2.0 * locs["ysigma"]))
                z = numpy.concatenate(
                    (z, numpy.ones(locs["xsigma"].size) * z_data[curf, 1]))

    return [wx, wy, z, pixel_size]
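
A usage sketch for loadWxWyZData() with placeholder file names; the z offset text file uses the two-column format described in the comments above:

# Substitute a real calibration movie's localization file and its z offset file.
[wx, wy, z, pixel_size] = loadWxWyZData("calibration.hdf5", "calibration_z_offsets.txt")
print("Loaded {0:d} localizations, pixel size {1:.1f}".format(wx.size, pixel_size))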
Example #4
def test_fiducials_4():
    """
    Test no localizations in reference frame.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            h5.addLocalizations(peaks, i)
        h5.addMovieInformation(FakeReader(n_frames=5))

    # Track fiducials.
    okay = False
    try:
        fiducials.trackFiducials(h5_name, radius=0.1, reference_frame=3)
    except fiducials.FiducialException:
        okay = True
    assert okay
Example #5
def test_sa_h5py_14():
    """
    Test gridding tracks with dx, dy.
    """
    tracks = {
        "x": numpy.array([10, 20, 30]),
        "y": numpy.array([10, 10, 10]),
        "z": numpy.array([-0.2, 0.0, 0.2])
    }

    dx = 1
    dy = 2
    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")

    # Tracks.
    with saH5Py.SAH5Py(h5_name, is_existing=False, overwrite=True) as h5:
        h5.setMovieInformation(40, 40, 1, "")
        h5.addTracks(tracks)

    with saH5Py.SAH5Grid(filename=h5_name, scale=1, z_bins=3) as h5g:
        im_2d = h5g.gridTracks2D(dx=dx, dy=dy)
        im_3d = h5g.gridTracks3D(-0.201, 0.201, dx=dx, dy=dy)

        for i in range(tracks["x"].size):
            assert (im_2d[int(tracks["x"][i] + dx),
                          int(tracks["y"][i] + dy)] == 1)
            assert (im_3d[int(tracks["x"][i] + dx),
                          int(tracks["y"][i] + dy), i] == 1)
Example #6
def overlayImage(movie_name, locs_name, frame_number, sx=8, sy=8):
    """
    Create an image of a frame with the localizations overlaid.
    """
    frame = datareader.inferReader(movie_name).loadAFrame(frame_number).astype(
        numpy.float64)
    with saH5Py.SAH5Py(locs_name) as h5:
        locs = h5.getLocalizationsInFrame(frame_number)

    frame = frame - numpy.min(frame)
    frame = frame / numpy.max(frame)

    fig = pyplot.figure(figsize=(sx, sy))
    ax = fig.add_subplot(1, 1, 1)
    ax.imshow(frame, interpolation='nearest', cmap="gray")
    for i in range(locs["x"].size):
        width = 10
        height = 10
        if "xsigma" in locs:
            width = height = 5.0 * locs["xsigma"][i]
        if "ysigma" in locs:
            height = 5.0 * locs["ysigma"][i]
        ellipse = patches.Ellipse((locs["x"][i], locs["y"][i]),
                                  width,
                                  height,
                                  facecolor='none',
                                  edgecolor='g',
                                  linewidth=2)
        ax.add_artist(ellipse)

    ax.set_title("Overlay Image")

    pyplot.show()
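
A usage sketch for overlayImage() with placeholder file names:

# Overlay the frame 0 localizations on frame 0 of the movie.
overlayImage("movie.dax", "movie.hdf5", 0)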
Example #7
def test_cl_sa_h5py_2():
    """
    Test basic cluster file mechanics (using tracks).
    """
    tracks = {
        "x": numpy.arange(11, dtype=numpy.float),
        "y": numpy.arange(11, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write track data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.addTracks(tracks)

    # Write clustering data for tracks.
    cluster_id = numpy.remainder(numpy.arange(11), 3)
    cluster_data = {
        "track_id": numpy.zeros(11, dtype=numpy.int),
        "loc_id": numpy.arange(11)
    }

    cl_size = [0, 4, 4, 3]
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        assert (cl_h5.getNClusters() == (len(cl_size) - 1))
        for index, cluster in cl_h5.clustersIterator(skip_unclustered=False):
            for field in cluster:
                assert (cluster[field].size == cl_size[index])
Example #8
def test_fiducials_7():
    """
    Iterator test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames=4))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1, reference_frame=2)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        for fdcl in h5.fiducialsIterator():
            assert (numpy.allclose(fdcl["frame"], numpy.arange(3)))
Example #9
def test_cl_sa_h5py_5():
    """
    Test getting all of the localizations for clustering.
    """
    locs = {
        "category": numpy.arange(4, dtype=numpy.int32),
        "x": numpy.arange(4, dtype=numpy.float),
        "y": numpy.arange(4, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 5, "")
        h5.setPixelSize(100.0)
        h5.addLocalizations(locs, 1)
        h5.addLocalizations(locs, 3)

    # Test getting all the localization data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert (numpy.allclose(x, cl_dict['loc_id']))
        assert (numpy.allclose(y, cl_dict['loc_id']))
        assert (numpy.allclose(z, numpy.zeros(x.size)))
        assert (numpy.allclose(c, cl_dict['loc_id']))
        assert (numpy.allclose(cl_dict['frame'],
                               numpy.array([1, 1, 1, 1, 3, 3, 3, 3])))
Example #10
def test_cl_sa_h5py_6():
    """
    Test getting all of the tracks for clustering.
    """
    tracks = {
        "category": numpy.arange(4, dtype=numpy.int32),
        "x": numpy.arange(4, dtype=numpy.float),
        "y": numpy.arange(4, dtype=numpy.float),
        "z": numpy.arange(4, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Test getting all the tracking data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert (numpy.allclose(x, cl_dict['loc_id']))
        assert (numpy.allclose(y, cl_dict['loc_id']))
        assert (numpy.allclose(z, cl_dict['loc_id']))
        assert (numpy.allclose(c, cl_dict['loc_id']))
        assert (numpy.allclose(cl_dict['track_id'],
                               numpy.array([0, 0, 0, 0, 1, 1, 1, 1])))
Example #11
def test_cl_sa_h5py_4():
    """
    Test cluster info string round trip.
    """
    locs = {
        "x": numpy.arange(10, dtype=numpy.float),
        "y": numpy.arange(10, dtype=numpy.float)
    }

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.addLocalizations(locs, 1)

    # Write clustering data for localizations.
    cluster_id = numpy.remainder(numpy.arange(10), 3)
    cluster_data = {
        "frame": numpy.ones(10, dtype=numpy.int),
        "loc_id": numpy.arange(10)
    }

    info_string = "dbscan,eps,10.0,mc,5"
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        cl_h5.addClusters(cluster_id, cluster_data)

        cl_h5.setClusteringInfo(info_string)
        assert (cl_h5.getClusteringInfo() == info_string)
Example #12
def checkAnalysis(dir_name):

    # Find all the job*.xml files.
    job_xml_files = glob.glob(os.path.join(dir_name, "job*.xml"))

    # Sort job files.
    job_xml_files = sorted(
        job_xml_files,
        key=lambda x: int(
            os.path.splitext(os.path.basename(x))[0].split("_")[1]))

    # Check for corresponding HDF5 files.
    incomplete = None
    for i in range(len(job_xml_files)):

        if ((i % 20) == 0):
            print("Checking", job_xml_files[i])

        h5_name = os.path.join(dir_name, "p_" + str(i + 1) + ".hdf5")
        if os.path.exists(h5_name):
            with saH5Py.SAH5Py(h5_name) as h5:
                if h5.isAnalysisFinished():
                    continue

        print("Job", job_xml_files[i], "is incomplete.")
        if incomplete is None:
            incomplete = str(i + 1)
        else:
            incomplete += "," + str(i + 1)

    if incomplete is not None:
        print("suggested job array string:")
        print(incomplete)
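
A usage sketch; the directory name is a placeholder for a folder containing the job*.xml files and their p_*.hdf5 results. The printed comma-separated string is meant to be pasted into a scheduler's job-array option so that only the incomplete jobs are re-run.

checkAnalysis("/path/to/analysis_jobs")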
Example #13
def fitTilt(h5_name, start=0, stop=1):
    """
    h5_name - The name of the HDF5 localization file (must be fit for z).

    Use the localizations in the range start <= frame number < stop. This 
    should be a region of the movie where the stage is near zero and also
    not moving.
    """

    # Load localizations.
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizationsInFrameRange(start,
                                               stop,
                                               fields=["x", "y", "z"])

    # Find the best fit plane through x,y,z.
    def fitfn(p):
        zf = p[0] + p[1] * locs["x"] + p[2] * locs["y"]
        return locs["z"] - zf

    params = [numpy.mean(locs["z"]), 0.0, 0.0]
    [results, success] = scipy.optimize.leastsq(fitfn, params)

    if (success < 1) or (success > 4):
        raise ZCalibrationException("fitTilt: fit failed!")

    return results
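
A usage sketch for fitTilt() with a placeholder file name; the returned parameters describe the fitted plane z = p0 + p1*x + p2*y:

# Fit the sample tilt using frames 0-99 of a z-fit localization file.
[z_offset, x_slope, y_slope] = fitTilt("zfit.hdf5", start=0, stop=100)
print("offset {0:.3f}, slopes {1:.3e} {2:.3e}".format(z_offset, x_slope, y_slope))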
Example #14
def zCheck(h5_name, parameters):
    """
    Mark all locations outside of the specified z range as category 9.
    """
    [min_z, max_z] = parameters.getZRange()

    with saH5Py.SAH5Py(h5_name) as h5:

        # Localizations.
        if h5.hasLocalizationsField("z"):
            for fnum, locs in h5.localizationsIterator(fields = ["category", "z"]):
                if((fnum%2000)==0):
                    print(" frame", fnum)
                cat = locs["category"]
                z_mask = (locs["z"] < min_z) | (locs["z"] > max_z)
                cat[z_mask] = 9
                h5.addLocalizationData(cat, fnum, "category")

        # Tracks.
        if h5.hasTracks():
            for index, locs in enumerate(h5.tracksIterator(fields = ["category", "z"])):
                if((index%5)==0):
                    print(" track group", index)
                cat = locs["category"]
                z_mask = (locs["z"] < min_z) | (locs["z"] > max_z)
                cat[z_mask] = 9
                h5.addTrackData(cat, index, "category")
Example #15
def test_fiducials_8():
    """
    Gap test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0])
    }

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in [0, 1, 3]:
            h5.addLocalizations(peaks, i)

        h5.addMovieInformation(FakeReader(n_frames=4))

    # Track fiducials.
    fiducials.trackFiducials(h5_name, radius=0.1, max_gap=1)

    # Check.
    with fiducials.SAH5Fiducials(h5_name) as h5:
        expected = numpy.array([0, 1, 3])
        for fdcl in h5.fiducialsIterator():
            assert (numpy.allclose(fdcl["frame"], expected))
Example #16
def test_fitz_c_4():
    """
    Test that tracks with wx, wy values that are not near the calibration 
    curve are assigned z values less than z minimum.

    Their category remains unchanged as this is done in a separate step.
    """
    # Load 3D parameters.
    settings = storm_analysis.getData("test/data/test_3d_3d.xml")
    parameters = params.ParametersDAO().initFromFile(settings)

    [wx_params, wy_params] = parameters.getWidthParams()
    [min_z, max_z] = parameters.getZRange()
    pixel_size = parameters.getAttr("pixel_size")

    # Calculate widths.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    [sx, sy] = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # Create HDF5 file with these widths.
    track_length = numpy.ones(sx.size)
    track_length[:2] = 2
    tracks = {
        "category": numpy.ones(sx.size, dtype=numpy.int32),
        "track_length": track_length,
        "x": numpy.zeros(sx.size),
        "xsigma": track_length * (sx / pixel_size + numpy.ones(sx.size)),
        "ysigma": track_length * (sy / pixel_size + numpy.ones(sx.size))
    }

    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)

    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(pixel_size)
        h5.addTracks(tracks)

    # Calculate Z values.
    fitzC.fitzTracks(h5_name, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)

    # Check Z values.
    with saH5Py.SAH5Py(h5_name) as h5:
        for tracks in h5.tracksIterator():
            assert (numpy.allclose(tracks["z"],
                                   min_z * numpy.ones(sx.size) - 1.0e-3))
            assert (numpy.allclose(tracks["category"], numpy.ones(sx.size)))
Example #17
def test_drift_correction_8():
    """
    Test XY offset determination & correction with offset.
    """
    n_locs = 500
    peaks = {
        "x": numpy.random.normal(loc=10.0, scale=0.2, size=n_locs),
        "y": numpy.random.normal(loc=10.0, scale=0.2, size=n_locs)
    }

    h5_name = storm_analysis.getPathOutputTest("test_dc_hdf5.hdf5")

    # Save peaks.
    t_dx = 3.0
    t_dy = 1.0
    with saH5Py.SAH5Py(h5_name, is_existing=False, overwrite=True) as h5:
        h5.setMovieInformation(20, 20, 2, "")
        h5.addLocalizations(peaks, 0)
        peaks["x"] += t_dx
        peaks["y"] += t_dy
        h5.addLocalizations(peaks, 1)

    scale = 2
    with driftUtils.SAH5DriftCorrection(filename=h5_name, scale=scale) as h5d:
        h5d.setFrameRange(0, 1)
        im1 = h5d.grid2D()
        h5d.setFrameRange(1, 2)
        im2 = h5d.grid2D()

        # Check that both images have the same number localizations.
        assert (numpy.sum(im1) == numpy.sum(im2))

        # Measure offset.
        [corr, dx, dy, success
         ] = imagecorrelation.xyOffset(im1,
                                       im2,
                                       scale,
                                       center=[-t_dx * scale, -t_dy * scale])

        # Test that it succeeded.
        assert (success)

        # Test that we got the right answer.
        dx = dx / scale
        dy = dy / scale
        assert (numpy.allclose(numpy.array([dx, dy]),
                               numpy.array([-t_dx, -t_dy]),
                               atol=1.0e-6))

        # Test that we are correcting in the right direction.
        h5d.setDriftCorrectionXY(dx, dy)
        im2 = h5d.grid2D(drift_corrected=True)
        [corr, dx, dy, success] = imagecorrelation.xyOffset(im1, im2, scale)
        dx = dx / scale
        dy = dy / scale

        assert (numpy.allclose(numpy.array([dx, dy]),
                               numpy.array([0.0, 0.0]),
                               atol=1.0e-6))
Example #18
def verifyNumberLocalizations(h5_name):
    """
    Return the number of localizations in a HDF5 file.
    """
    n_locs = None
    with saH5Py.SAH5Py(h5_name) as h5:
        n_locs = h5.getNLocalizations()
    return n_locs
Example #19
def verifyZWasCalculated(h5_name):
    """
    Return True if the Z values are not all identical.
    """
    locs = None
    with saH5Py.SAH5Py(h5_name) as h5:
        locs = h5.getLocalizations(fields=["z"])
    return (numpy.std(locs["z"]) > 1.0e-6)
Example #20
def test_sa_h5py_1():
    """
    Test metadata round trip.
    """
    metadata = "<xml><field1><data1>data</data1></field></xml>"

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write metadata.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.addMetadata(metadata)

    # Read metadata.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert (metadata == h5.getMetadata())
Example #21
def psfZStack(movie_name, h5_filename, zstack_name, scmos_cal = None, aoi_size = 8, driftx = 0.0, drifty = 0.0):
    """
    movie_name - The movie file containing the z stack.
    h5_filename - The HDF5 file containing the localizations to use for the PSF measurement.
    zstack_name - The name of the file to save the zstack in.
    scmos_cal - The sCMOS calibration file.
    aoi_size - The AOI size in pixels.

    driftx, drifty are in units of pixels per frame, (bead x last frame - bead x first frame)/n_frames.
    """
    # Create appropriate reader.
    if scmos_cal is None:
        fr_reader = datareader.inferReader(movie_name)
    else:
        fr_reader = analysisIO.FrameReaderSCMOS(movie_file = movie_name,
                                                calibration_file = scmos_cal)
        
    [movie_x, movie_y, movie_len] = fr_reader.filmSize()
    
    # Load localizations.
    with saH5Py.SAH5Py(h5_filename) as h5:
        locs = h5.getLocalizations()
        x = locs["y"] + 1
        y = locs["x"] + 1

    # Measure Z stacks.
    z_stacks = []
    for i in range(x.size):
        z_stacks.append(numpy.zeros((4*aoi_size, 4*aoi_size, movie_len)))
        
    for i in range(movie_len):
        if((i%50)==0):
            print("Processing frame {0:0d}".format(i))

        # Load the frame. This also handles gain and offset correction.
        #
        frame = fr_reader.loadAFrame(i)

        # Subtract estimated background. This assumes that the image is
        # mostly background and that the background is uniform.
        #
        frame = frame - numpy.median(frame)
            
        for j in range(x.size):
            xf = x[j] + driftx * float(i)
            yf = y[j] + drifty * float(i)
            z_stacks[j][:,:,i] = measurePSFUtils.extractAOI(frame, aoi_size, xf, yf)

    # Save z_stacks.
    numpy.save(zstack_name + ".npy", z_stacks)

    # Save a (normalized) z_stack as tif for inspection purposes.
    z_stack = z_stacks[0]
    z_stack = z_stack/numpy.amax(z_stack)
    z_stack = z_stack.astype(numpy.float32)
    with tifffile.TiffWriter(zstack_name + ".tif") as tf:
        for i in range(movie_len):
            tf.save(z_stack[:,:,i])
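
A usage sketch for psfZStack() with placeholder file names; scmos_cal is omitted so the reader type is inferred from the movie:

# driftx of 1.0e-3 pixels/frame is an illustrative value for a slowly drifting stage.
psfZStack("beads_zstack.dax", "beads_locs.hdf5", "beads_zstack",
          aoi_size=8, driftx=1.0e-3, drifty=0.0)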
Example #22
def test_tracker_2():
    """
    Test descriptor.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0]),
        "sum": numpy.array([4.0, 4.0, 4.0])
    }

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(4):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][:i]
            if (len(temp["x"]) > 0):
                h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames=4))

    # Track.
    tracker.tracker(h5_name, descriptor="1212", radius=0.1)

    # Tracking.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert (h5.getNTracks() == 3)
        for t in h5.tracksIterator():
            assert (numpy.allclose(peaks["x"], t["x"]))
            assert (numpy.allclose(peaks["y"], t["y"]))
            assert (numpy.allclose(numpy.array([1, 0, 1]), t["category"]))

        # Check localization categories.
        for fnum, locs in h5.localizationsIterator(fields=["category"]):
            if (fnum == 1):
                assert (numpy.allclose(numpy.array([1]), locs["category"]))
            if (fnum == 2):
                assert (numpy.allclose(numpy.array([0, 0]), locs["category"]))
            if (fnum == 3):
                assert (numpy.allclose(numpy.array([1, 1, 1]),
                                       locs["category"]))
Example #23
    def __init__(self, filename=None, **kwds):
        super(MoleculeListHDF5, self).__init__(**kwds)

        self.fields = [
            "x", "y", "z", "background", "error", "height", "sum", "xsigma",
            "ysigma", "category", "iterations", "significance"
        ]

        self.reader = saH5Py.SAH5Py(filename)
Example #24
def emittersInClusters(h5_name, ncl, nlocs, dev, sigma = 1.5, sx = 256, sy = 256, z_start = -0.5, z_stop = 0.5):
    """
    h5_name - The name of the HDF5 file to save the emitter locations, etc.
    ncl - The number of clusters.
    nlocs - The number of localizations per cluster.
    dev - Cluster standard deviation in pixels.
    sigma - The sigma for the localizations, default is 1.5 pixels.
    sx - Image x size in pixels, default is 256.
    sy - Image y size in pixels, default is 256.
    z_start - Starting value for z position in microns, default is -0.5um.
    z_stop - Stopping value for z position in microns, default is 0.5um.
    """
                    
    # First, create a list of cluster centers.
    cl_centers = []
    while (len(cl_centers) < ncl):
        cx = random.uniform(0.0, sx)
        cy = random.uniform(0.0, sy)
        cz = random.uniform(z_start, z_stop)

        # Don't keep the cluster if it is too close to the edge of the image.
        if (cx < 2.0) or (cx > (sx - 2.0)):
            continue
        if (cy < 2.0) or (cy > (sy - 2.0)):
            continue

        cl_centers.append([cx, cy, cz])

    # Next, create localizations for each cluster.
    xp = None
    yp = None
    zp = None
    for clc in cl_centers:

        if xp is None:
            xp = numpy.random.normal(scale = dev, size = nlocs) + clc[0]
            yp = numpy.random.normal(scale = dev, size = nlocs) + clc[1]

            # Z is in microns, we'll assume a 100nm pixel size.
            zp = numpy.random.normal(scale = dev * 0.1, size = nlocs) + clc[2]
        else:
            xp = numpy.append(xp, numpy.random.normal(scale = dev, size = nlocs) + clc[0])
            yp = numpy.append(yp, numpy.random.normal(scale = dev, size = nlocs) + clc[1])
            zp = numpy.append(zp, numpy.random.normal(scale = dev * 0.1, size = nlocs) + clc[2])

    # Create a molecule list structure & save it.
    peaks = {}
    peaks["x"] = xp
    peaks["y"] = yp
    peaks["z"] = zp
    peaks["xsigma"] = sigma*numpy.ones(xp.size)
    peaks["ysigma"] = sigma*numpy.ones(yp.size)

    # Save localizations.
    with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
        h5.setMovieInformation(sx, sy, 1, "")
        h5.addLocalizations(peaks, 0)
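
A usage sketch; this writes a placeholder "clusters.hdf5" containing 10 clusters of 100 localizations each with a 2.0 pixel cluster standard deviation:

emittersInClusters("clusters.hdf5", 10, 100, 2.0)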
Example #25
def verifyNumberLocalizations(h5_name):
    """
    Return the number of localizations in a HDF5 file.
    """
    n_locs = None
    with saH5Py.SAH5Py(h5_name) as h5:
        assert (h5.isAnalysisFinished())
        n_locs = h5.getNLocalizations()
    return n_locs
Example #26
def test_tracker_1():
    """
    Basic tracking test.
    """
    peaks = {
        "x": numpy.array([1.0, 2.0, 3.0]),
        "y": numpy.array([1.0, 1.0, 1.0]),
        "sum": numpy.array([4.0, 4.0, 4.0])
    }

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames=3))

    # Track.
    tracker.tracker(h5_name, radius=0.1)

    # Tracking.
    with saH5Py.SAH5Py(h5_name) as h5:

        # Check that we have the right number of tracks.
        assert (h5.getNTracks() == 3)

        # Check tracks.
        for t in h5.tracksIterator():
            assert (numpy.allclose(peaks["x"], t["x"]))
            assert (numpy.allclose(peaks["y"], t["y"]))
            assert (numpy.allclose(numpy.array([0, 0, 0]), t["frame_number"]))
            assert (numpy.allclose(numpy.array([1, 2, 3]), t["track_length"]))

        # Check that the localizations 'track_id' field is correct.
        for fnum, locs in h5.localizationsIterator(fields=["track_id"]):
            assert (numpy.allclose(
                numpy.array([0, 1, 2])[fnum:], locs["track_id"]))
Example #27
def mergeHDF5(hdf5_files, results_file):
    """
    Note: This only merges the tracks not the localizations.
    """
    with saH5Py.SAH5Py(results_file, is_existing = False) as h5_out:
        for i, h5_name in enumerate(hdf5_files):
            with saH5Py.SAH5Py(h5_name) as h5_in:
                if (i == 0):
                    [mx, my] = h5_in.getMovieInformation()[:2]
                    h5_out.setMovieInformation(mx, my, 0, "")
                    h5_out.setPixelSize(h5_in.getPixelSize())
                    h5_out.addMetadata(h5_in.getMetadata())

                for tracks in h5_in.tracksIterator():
                    sys.stdout.write(".")
                    sys.stdout.flush()
                    h5_out.addTracks(tracks)

                sys.stdout.write("\n")
Example #28
def test_std_analysis_1():
    """
    Test zCheck.
    """
    # Load 3D parameters.
    settings = storm_analysis.getData("test/data/test_3d_3d.xml")
    parameters = params.ParametersDAO().initFromFile(settings)

    [min_z, max_z] = parameters.getZRange()
    assert (abs(min_z + 0.5) < 1.0e-6)
    assert (abs(max_z - 0.5) < 1.0e-6)

    # Create HDF5 file with localizations and tracks.
    zvals = numpy.arange(-1.0, 1.05, 0.2)

    peaks = {
        "category": numpy.ones(zvals.size, dtype=numpy.int32),
        "x": numpy.zeros(zvals.size),
        "z": zvals
    }

    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)

    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.addLocalizations(peaks, 1)
        h5.addTracks(peaks)

    # Run z check on the file.
    stdAnalysis.zCheck(h5_name, parameters)

    # Check track and localization categories.
    category = numpy.ones(zvals.size, dtype=numpy.int32)
    z_mask = (zvals < min_z) | (zvals > max_z)
    category[z_mask] = 9

    with saH5Py.SAH5Py(h5_name) as h5:
        for fnum, locs in h5.localizationsIterator(fields=["category"]):
            assert (numpy.allclose(locs["category"], category))

        for tracks in h5.tracksIterator(fields=["category"]):
            assert (numpy.allclose(tracks["category"], category))
Example #29
def collate():
    dirs = sorted(glob.glob("test*"))

    if (len(dirs) == 0):
        print("No test directories found.")
        return

    # Adjust z positions in the channel 0 reference.
    for a_dir in dirs:
        with saH5Py.SAH5Py(a_dir + "/test_c1_ref.hdf5") as h5_in:
            with saH5Py.SAH5Py(a_dir + "/test_ref.hdf5",
                               is_existing=False,
                               overwrite=True) as h5_out:
                h5_out.setMovieInformation(*h5_in.getMovieInformation())

                for fnum, locs in h5_in.localizationsIterator():
                    locs["z"] -= settings.z_planes[0]
                    h5_out.addLocalizations(locs, fnum)

    collateResults.collateDAO(dirs, settings, calc_width_error=False)
Example #30
def test_voronoi_clustering_1():
    numpy.random.seed(1)

    filename = "test_clustering_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    category = numpy.zeros(10, dtype=numpy.int32)
    x = 10.0 * numpy.arange(10)
    y = 10.0 * numpy.arange(10)
    z = numpy.zeros(10)

    with saH5Py.SAH5Py(h5_name, is_existing=False) as h5:
        h5.setMovieInformation(1, 1, 2, "")
        h5.setPixelSize(1.0)

        for i in range(100):
            tracks = {
                "category": category,
                "x": x + numpy.random.normal(scale=0.1, size=10),
                "y": y + numpy.random.normal(scale=0.1, size=10),
                "z": z + numpy.random.normal(scale=0.1, size=10)
            }

            h5.addTracks(tracks)

    # Cluster data with voronoi.
    voronoiAnalysis.findClusters(h5_name, 0.1, 10, verbose=False)

    # Check clustering results.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        assert (cl_h5.getNClusters() == 10)
        for index, cluster in cl_h5.clustersIterator(skip_unclustered=True,
                                                     fields=["x", "y", "z"]):
            for elt in ['x', 'y', 'z']:
                dev = numpy.std(cluster[elt])
                assert (dev > 0.07)
                assert (dev < 0.12)

    # Calculate common cluster statistics.
    stats_name = dbscanAnalysis.clusterStats(h5_name, 50, verbose=False)

    # Check statistics.
    stats = numpy.loadtxt(stats_name, skiprows=1)
    index = numpy.argsort(stats[:, 3])
    assert (stats.shape[0] == 10)
    assert (numpy.allclose(stats[:, 0], numpy.arange(10) + 1))
    assert (numpy.allclose(stats[:, 1], numpy.zeros(10)))
    assert (numpy.count_nonzero(
        numpy.greater(stats[:, 2], 80.0 * numpy.ones(10))) == 10)
    assert (numpy.allclose(stats[index, 3], x, rtol=0.2, atol=2.0))
    assert (numpy.allclose(stats[index, 4], y, rtol=0.2, atol=2.0))
    assert (numpy.allclose(stats[index, 5], z, rtol=0.2, atol=20.0))