def test_ia_util_12():
    """
    Test removeNeighbors().
    """
    x = numpy.array([1.0, 2.0, 4.0])
    y = numpy.ones(x.size)

    [px, py] = iaUtilsC.removeNeighbors(x, y, 0.5)
    assert (px.size == 3)
    assert (py.size == 3)

    [px, py] = iaUtilsC.removeNeighbors(x, y, 1.5)
    assert (px.size == 1)
    assert (py.size == 1)
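
# A minimal pure-Python sketch of what the test above exercises, assuming
# that removeNeighbors() keeps only the localizations whose nearest neighbor
# is farther away than the radius (the real implementation is in C; whether
# the boundary distance counts as a neighbor is an assumption here).
def removeNeighborsSketch(x, y, radius):
    keep = numpy.ones(x.size, dtype = bool)
    for i in range(x.size):
        for j in range(x.size):
            if (i != j) and (numpy.hypot(x[i] - x[j], y[i] - y[j]) <= radius):
                keep[i] = False
                break
    # With x = [1.0, 2.0, 4.0] and radius = 1.5 the points at 1.0 and 2.0
    # remove each other and only 4.0 survives, matching the test.
    return [x[keep], y[keep]]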
def psfLocalizations(i3_filename, mapping_filename, frame = 1, aoi_size = 8, movie_filename = None):

    # Load localizations.
    i3_reader = readinsight3.I3Reader(i3_filename)

    # Load mapping.
    mappings = {}
    if os.path.exists(mapping_filename):
        with open(mapping_filename, 'rb') as fp:
            mappings = pickle.load(fp)
    else:
        print("Mapping file not found, single channel data?")

    # Try and determine movie frame size.
    i3_metadata = readinsight3.loadI3Metadata(i3_filename)
    if i3_metadata is None:
        if movie_filename is None:
            raise Exception("I3 metadata not found and movie filename is not specified.")
        else:
            movie_fp = datareader.inferReader(movie_filename)
            [movie_y, movie_x] = movie_fp.filmSize()[:2]
    else:
        movie_data = i3_metadata.find("movie")

        # FIXME: These may be transposed?
        movie_x = int(movie_data.find("movie_x").text)
        movie_y = int(movie_data.find("movie_y").text)
    
    # Load localizations in the requested frame.
    locs = i3_reader.getMoleculesInFrame(frame)
    print("Loaded", locs.size, "localizations.")

    # Remove localizations that are too close to each other.
    in_locs = numpy.zeros((locs["x"].size, util_c.getNPeakPar()))
    in_locs[:,util_c.getXCenterIndex()] = locs["x"]
    in_locs[:,util_c.getYCenterIndex()] = locs["y"]

    out_locs = util_c.removeNeighbors(in_locs, 2 * aoi_size)

    xf = out_locs[:,util_c.getXCenterIndex()]
    yf = out_locs[:,util_c.getYCenterIndex()]

    #
    # Remove localizations that are too close to the edge or
    # outside of the image in any of the channels.
    #
    is_good = numpy.ones(xf.size, dtype = bool)
    for i in range(xf.size):

        # Check in Channel 0.
        if (xf[i] < aoi_size) or (xf[i] + aoi_size >= movie_x):
            is_good[i] = False
            continue
        
        if (yf[i] < aoi_size) or (yf[i] + aoi_size >= movie_y):
            is_good[i] = False
            continue

        # Check other channels.
        for key in mappings:
            if not is_good[i]:
                break
            
            coeffs = mappings[key]
            [ch1, ch2, axis] = key.split("_")
            if (ch1 == "0"):

                if (axis == "x"):
                    xm = coeffs[0] + coeffs[1]*xf[i] + coeffs[2]*yf[i]
                    if (xm < aoi_size) or (xm + aoi_size >= movie_x):
                        is_good[i] = False
                        break

                elif (axis == "y"):
                    ym = coeffs[0] + coeffs[1]*xf[i] + coeffs[2]*yf[i]
                    if (ym < aoi_size) or (ym + aoi_size >= movie_y):
                        is_good[i] = False
                        break

    #
    # Save localizations for each channel.
    #
    gx = xf[is_good]
    gy = yf[is_good]

    basename = os.path.splitext(i3_filename)[0]
    with writeinsight3.I3Writer(basename + "_c1_psf.bin") as w3:
        w3.addMoleculesWithXY(gx, gy)
    
    index = 1
    while ("0_" + str(index) + "_x" in mappings):
        cx = mappings["0_" + str(index) + "_x"]
        cy = mappings["0_" + str(index) + "_y"]
        #cx = mappings[str(index) + "_0" + "_x"]
        #cy = mappings[str(index) + "_0" + "_y"]
        xm = cx[0] + cx[1] * gx + cx[2] * gy
        ym = cy[0] + cy[1] * gx + cy[2] * gy

        with writeinsight3.I3Writer(basename + "_c" + str(index+1) + "_psf.bin") as w3:
            w3.addMoleculesWithXY(xm, ym)

        index += 1

    #
    # Print localizations that were kept.
    #
    print(gx.size, "localizations were kept:")
    for i in range(gx.size):
        print("ch0: {0:.2f} {1:.2f}".format(gx[i], gy[i]))
        index = 1
        while ("0_" + str(index) + "_x" in mappings):
            cx = mappings["0_" + str(index) + "_x"]
            cy = mappings["0_" + str(index) + "_y"]
            xm = cx[0] + cx[1] * gx[i] + cx[2] * gy[i]
            ym = cy[0] + cy[1] * gx[i] + cy[2] * gy[i]
            print("ch" + str(index) + ": {0:.2f} {1:.2f}".format(xm, ym))
            index += 1
        print("")
    print("")
Example #3
def measurePSF():
    
    # Create sparse random localizations for PSF measurement.
    #
    print("Creating random localization.")
    sim_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/simulator/"
    subprocess.call(["python", sim_path + "emitters_uniform_random.py",
                     "--bin", "sparse_random.hdf5",
                     "--density", "0.0002",
                     "--margin", str(settings.margin),
                     "--sx", str(settings.x_size),
                     "--sy", str(settings.y_size)])

    # Create sparser grid for PSF measurement.
    #
    print("Creating data for PSF measurement.")
    sim_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/simulator/"
    subprocess.call(["python", sim_path + "emitters_on_grid.py",
                     "--bin", "sparse_grid.hdf5",
                     "--nx", "8",
                     "--ny", "3",
                     "--spacing", "40"])

    # Create text files for PSF measurement.
    #
    locs = saH5Py.loadLocalizations("sparse_random.hdf5")
    [xf, yf] = iaUtilsC.removeNeighbors(locs["x"], locs["y"], 2.0 * ((settings.psf_size/2)+1))
    numpy.savetxt("sparse_random.txt", numpy.transpose(numpy.vstack((xf, yf))))
    
    locs = saH5Py.loadLocalizations("sparse_grid.hdf5")
    numpy.savetxt("sparse_grid.txt", numpy.transpose(numpy.vstack((locs['x'], locs['y']))))

    # Create drift file, this is used to displace the localizations in the
    # PSF measurement movie.
    #
    dz = numpy.arange(-settings.psf_z_range, settings.psf_z_range + 0.001, 0.010)
    drift_data = numpy.zeros((dz.size, 3))
    drift_data[:,2] = dz
    numpy.savetxt("drift.txt", drift_data)

    # Also create the z-offset file.
    #
    z_offset = numpy.ones((dz.size, 2))
    z_offset[:,1] = dz
    numpy.savetxt("z_offset.txt", z_offset)
    
    z_offset[:,0] = 0
    numpy.savetxt("z_offset_none_valid.txt", z_offset)
    
    # Create simulated data for PSF measurement.
    #
    bg_f = lambda s, x, y, i3 : background.UniformBackground(s, x, y, i3, photons = 10)
    cam_f = lambda s, x, y, i3 : camera.Ideal(s, x, y, i3, 100.)
    drift_f = lambda s, x, y, i3 : drift.DriftFromFile(s, x, y, i3, "drift.txt")
    pp_f = lambda s, x, y, i3 : photophysics.AlwaysOn(s, x, y, i3, 20000.0)
    psf_f = lambda s, x, y, i3 : psf.PupilFunction(s, x, y, i3, 100.0, settings.zmn)
    
    sim = simulate.Simulate(background_factory = bg_f,
                            camera_factory = cam_f,
                            drift_factory = drift_f,
                            photophysics_factory = pp_f,
                            psf_factory = psf_f,
                            x_size = settings.x_size,
                            y_size = settings.y_size)

    if True:
        sim.simulate("sparse_grid.dax", "sparse_grid.hdf5", dz.size)
        sim.simulate("sparse_random.dax", "sparse_random.hdf5", dz.size)

    # Measure the PSF using spliner/measure_psf_beads.py and multiplane/measure_psf.py
    #

    diff_detected = False

    # Grid.
    if True:
        print("Measuring PSF (beads).")
        spliner_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/spliner/"
        subprocess.call(["python", spliner_path + "measure_psf_beads.py",
                         "--movie", "sparse_grid.dax",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--beads", "sparse_grid.txt",
                         "--psf", "sparse_grid_beads.psf",
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5, with zoffset).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_grid.dax",
                         "--bin", "sparse_grid_ref.hdf5",
                         "--psf", "sparse_grid_hdf5_zo.psf",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_grid.dax",
                         "--bin", "sparse_grid_ref.hdf5",
                         "--psf", "sparse_grid_hdf5.psf",
                         "--zoffset", "",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        multiplane_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/multi_plane/"
        print("Measure PSF (multiplane).")
        subprocess.call(["python", multiplane_path + "psf_zstack.py",
                         "--movie", "sparse_grid.dax",
                         "--bin", "sparse_grid.hdf5",
                         "--zstack", "sparse_grid_zstack",
                         "--aoi_size", str(int(settings.psf_size/2)+1)])

        subprocess.call(["python", multiplane_path + "measure_psf.py",
                         "--zstack", "sparse_grid_zstack.npy",
                         "--zoffsets", "z_offset.txt",
                         "--psf_name", "sparse_grid_hdf5_mp_zo.psf",
                         "--z_range", str(settings.psf_z_range),
                         "--z_step", str(settings.psf_z_step),
                         "--normalize", "True"])

        # Check that the PSFs are the same.
        psf_beads = numpy.load("sparse_grid_beads.psf")["psf"]
        psf_hdf5_zo = numpy.load("sparse_grid_hdf5_zo.psf")["psf"]
        psf_hdf5 = numpy.load("sparse_grid_hdf5.psf")["psf"]
        psf_hdf5_mp_zo = numpy.load("sparse_grid_hdf5_mp_zo.psf")["psf"]

        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_zo)
        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5)

        # Here we are only checking they are close.
        if (settings.psf_size >= 20):
            diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_mp_zo, atol = 0.17, rtol = 0.17)

    # Grid, no valid z offsets.
    if True:
        print("Measuring PSF (beads).")
        spliner_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/spliner/"
        try:
            subprocess.check_output(["python", spliner_path + "measure_psf_beads.py",
                                     "--movie", "sparse_grid.dax",
                                     "--zoffset", "z_offset_none_valid.txt",
                                     "--aoi_size", str(int(settings.psf_size/2)+1),
                                     "--beads", "sparse_grid.txt",
                                     "--psf", "sparse_grid_beads.psf",
                                     "--zrange", str(settings.psf_z_range),
                                     "--zstep", str(settings.psf_z_step)])
        except subprocess.CalledProcessError:
            pass
        else:
            assert False, "spliner.measure_psf_beads did not fail!"

        print("Measuring PSF (HDF5, with zoffset).")
        try:
            subprocess.check_output(["python", spliner_path + "measure_psf.py",
                                     "--movie", "sparse_grid.dax",
                                     "--bin", "sparse_grid_ref.hdf5",
                                     "--psf", "sparse_grid_hdf5_zo.psf",
                                     "--zoffset", "z_offset_none_valid.txt",
                                     "--aoi_size", str(int(settings.psf_size/2)+1),
                                     "--zrange", str(settings.psf_z_range),
                                     "--zstep", str(settings.psf_z_step)])
        except subprocess.CalledProcessError:
            pass
        else:
            assert False, "spliner.measure_psf did not fail!"            

        multiplane_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/multi_plane/"
        print("Measure PSF (multiplane).")
        try:
            subprocess.check_output(["python", multiplane_path + "psf_zstack.py",
                                    "--movie", "sparse_grid.dax",
                                     "--bin", "sparse_grid.hdf5",
                                     "--zstack", "sparse_grid_zstack",
                                     "--aoi_size", str(int(settings.psf_size/2)+1)])

            subprocess.check_output(["python", multiplane_path + "measure_psf.py",
                                     "--zstack", "sparse_grid_zstack.npy",
                                     "--zoffsets", "z_offset_none_valid.txt",
                                     "--psf_name", "sparse_grid_hdf5_mp_zo.psf",
                                     "--z_range", str(settings.psf_z_range),
                                     "--z_step", str(settings.psf_z_step),
                                     "--normalize", "True"])
        except subprocess.CalledProcessError:
            pass
        else:
            assert False, "multiplane PSF measurement did not fail!"

    # Random.
    if True:
        print("Measuring PSF (beads).")
        spliner_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/spliner/"
        subprocess.call(["python", spliner_path + "measure_psf_beads.py",
                         "--movie", "sparse_random.dax",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--beads", "sparse_random.txt",
                         "--psf", "sparse_random_beads.psf",
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5, with zoffset).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_random.dax",
                         "--bin", "sparse_random_ref.hdf5",
                         "--psf", "sparse_random_hdf5_zo.psf",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_random.dax",
                         "--bin", "sparse_random_ref.hdf5",
                         "--psf", "sparse_random_hdf5.psf",
                         "--zoffset", "",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])    

        psf_beads = numpy.load("sparse_random_beads.psf")["psf"]
        psf_hdf5_zo = numpy.load("sparse_random_hdf5_zo.psf")["psf"]
        psf_hdf5 = numpy.load("sparse_random_hdf5.psf")["psf"]

        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_zo)
        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5)
    
    if diff_detected:
        print("Difference detected in PSF measurements!")
    else:
        print("No differences detected, all good.")

    if False:
        with tifffile.TiffWriter("psf_diff.tif") as tf:
            for i in range(psf_beads.shape[0]):
                tf.save((psf_beads[i,:,:] - psf_hdf5_zo[i,:,:]).astype(numpy.float32))
Example #4
    # Select localizations in current frame & not near the edges.
    mask = ((i3_data['fr'] == curf) &
            (i3_data['x'] > aoi_size) & (i3_data['x'] < (dax_y - aoi_size - 1)) &
            (i3_data['y'] > aoi_size) & (i3_data['y'] < (dax_x - aoi_size - 1)))
    xr = i3_data['x'][mask]
    yr = i3_data['y'][mask]
    ht = i3_data['h'][mask]

    # Remove localizations that are too close to each other.
    in_peaks = numpy.zeros((xr.size, util_c.getNResultsPar()))
    in_peaks[:, util_c.getXCenterIndex()] = xr
    in_peaks[:, util_c.getYCenterIndex()] = yr
    in_peaks[:, util_c.getHeightIndex()] = ht

    out_peaks = util_c.removeNeighbors(in_peaks, aoi_size)

    print(curf, in_peaks.shape, out_peaks.shape)

    # Use remaining localizations to calculate spline.
    image = dax_data.loadAFrame(curf - 1).astype(numpy.float64)

    xr = out_peaks[:, util_c.getXCenterIndex()]
    yr = out_peaks[:, util_c.getYCenterIndex()]
    ht = out_peaks[:, util_c.getHeightIndex()]

    for i in range(xr.size):
        xf = xr[i]
        yf = yr[i]
        xi = int(xf)
        yi = int(yf)
Example #5
def measurePSF(movie_name,
               zfile_name,
               movie_mlist,
               psf_name,
               want2d=False,
               aoi_size=12,
               z_range=750.0,
               z_step=50.0):
    """
    The actual z range is 2x z_range (i.e. from -z_range to z_range).
    """

    # Load dax file, z offset file and molecule list file.
    dax_data = datareader.inferReader(movie_name)
    z_offsets = None
    if os.path.exists(zfile_name):
        try:
            z_offsets = numpy.loadtxt(zfile_name, ndmin=2)[:, 1]
        except IndexError:
            z_offsets = None
            print("z offsets were not loaded.")
    i3_data = readinsight3.loadI3File(movie_mlist)

    if want2d:
        print("Measuring 2D PSF")
    else:
        print("Measuring 3D PSF")

    #
    # Go through the frames identifying good peaks and adding them
    # to the average psf. For 3D molecule z positions are rounded to
    # the nearest 50nm.
    #
    z_mid = int(z_range / z_step)
    max_z = 2 * z_mid + 1
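    # For example, with the defaults z_range = 750.0 and z_step = 50.0 this
    # gives z_mid = 15 and max_z = 31 z slices covering -750nm to +750nm.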

    average_psf = numpy.zeros((max_z, 4 * aoi_size, 4 * aoi_size))
    peaks_used = 0
    totals = numpy.zeros(max_z)
    [dax_x, dax_y, dax_l] = dax_data.filmSize()
    for curf in range(dax_l):

        # Select localizations in current frame & not near the edges.
        mask = ((i3_data['fr'] == curf + 1) &
                (i3_data['x'] > aoi_size) & (i3_data['x'] < (dax_x - aoi_size - 1)) &
                (i3_data['y'] > aoi_size) & (i3_data['y'] < (dax_y - aoi_size - 1)))
        xr = i3_data['x'][mask]
        yr = i3_data['y'][mask]

        # Use the z offset file if it was specified, otherwise use localization z positions.
        if z_offsets is None:
            if (curf == 0):
                print("Using fit z locations.")
            zr = i3_data['z'][mask]
        else:
            if (curf == 0):
                print("Using z offset file.")
            zr = numpy.ones(xr.size) * z_offsets[curf]

        ht = i3_data['h'][mask]

        # Remove localizations that are too close to each other.
        in_peaks = numpy.zeros((xr.size, util_c.getNPeakPar()))
        in_peaks[:, util_c.getXCenterIndex()] = xr
        in_peaks[:, util_c.getYCenterIndex()] = yr
        in_peaks[:, util_c.getZCenterIndex()] = zr
        in_peaks[:, util_c.getHeightIndex()] = ht

        out_peaks = util_c.removeNeighbors(in_peaks, 2 * aoi_size)
        #out_peaks = util_c.removeNeighbors(in_peaks, aoi_size)

        print(curf, "peaks in", in_peaks.shape[0], ", peaks out",
              out_peaks.shape[0])

        # Use remaining localizations to calculate spline.
        image = dax_data.loadAFrame(curf).astype(numpy.float64)

        xr = out_peaks[:, util_c.getXCenterIndex()]
        yr = out_peaks[:, util_c.getYCenterIndex()]
        zr = out_peaks[:, util_c.getZCenterIndex()]
        ht = out_peaks[:, util_c.getHeightIndex()]

        for i in range(xr.size):
            xf = xr[i]
            yf = yr[i]
            zf = zr[i]
            xi = int(xf)
            yi = int(yf)
            if want2d:
                zi = 0
            else:
                zi = int(round(zf / z_step) + z_mid)
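                # (zf is in nm, so this maps [-z_range, z_range] onto the
                # slice indices 0 .. max_z - 1.)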

            # check the z is in range
            if (zi > -1) and (zi < max_z):

                # get localization image
                mat = image[xi - aoi_size:xi + aoi_size,
                            yi - aoi_size:yi + aoi_size]

                # zoom in by 2x
                psf = scipy.ndimage.zoom(mat, 2.0)

                # re-center image
                psf = scipy.ndimage.shift(
                    psf, (-2.0 * (xf - xi), -2.0 * (yf - yi)), mode='nearest')
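                # (The shift is doubled because the image was zoomed 2x
                # above; the negative sub-pixel offsets center each peak on
                # the zoomed pixel grid so the peaks average coherently.)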

                # add to average psf accumulator
                average_psf[zi, :, :] += psf
                totals[zi] += 1

    # Force PSF to be zero (on average) at the boundaries.
    for i in range(max_z):
        edge = numpy.concatenate((average_psf[i, 0, :], average_psf[i, -1, :],
                                  average_psf[i, :, 0], average_psf[i, :, -1]))
        average_psf[i, :, :] -= numpy.mean(edge)

    # Normalize the PSF.
    if want2d:
        max_z = 1

    for i in range(max_z):
        print(i, totals[i])
        if (totals[i] > 0.0):
            average_psf[i, :, :] = average_psf[i, :, :] / numpy.sum(
                numpy.abs(average_psf[i, :, :]))

    average_psf = average_psf / numpy.max(average_psf)

    # Save PSF (in image form).
    if True:
        import storm_analysis.sa_library.daxwriter as daxwriter
        dxw = daxwriter.DaxWriter(
            os.path.join(os.path.dirname(psf_name), "psf.dax"),
            average_psf.shape[1], average_psf.shape[2])
        for i in range(max_z):
            dxw.addFrame(1000.0 * average_psf[i, :, :] + 100)
        dxw.close()

    # Save PSF.
    if want2d:
        psf_dict = {"psf": average_psf[0, :, :], "type": "2D"}

    else:
        cur_z = -z_range
        z_vals = []
        for i in range(max_z):
            z_vals.append(cur_z)
            cur_z += z_step

        psf_dict = {
            "psf": average_psf,
            "pixel_size": 0.080,  # 1/2 the camera pixel size in nm.
            "type": "3D",
            "zmin": -z_range,
            "zmax": z_range,
            "zvals": z_vals
        }

    with open(psf_name, 'wb') as fp:
        pickle.dump(psf_dict, fp)
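
# A hypothetical call of the above (file names are placeholders): measure a
# 3D PSF from "beads.dax" over +-600nm in 50nm steps, using the z offsets in
# "z_offsets.txt" and the Insight3 molecule list "beads_mlist.bin".
measurePSF("beads.dax", "z_offsets.txt", "beads_mlist.bin", "beads.psf",
           want2d = False, aoi_size = 12, z_range = 600.0, z_step = 50.0)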
Example #6
[dax_x, dax_y, dax_l] = dax_data.filmSize()
while (curf < dax_l) and (peaks_used < min_peaks):

    # Select localizations in current frame & not near the edges.
    mask = (i3_data['fr'] == curf) & (i3_data['x'] > aoi_size) & (i3_data['x'] < (dax_y - aoi_size - 1)) & (i3_data['y'] > aoi_size) & (i3_data['y'] < (dax_x - aoi_size - 1))
    xr = i3_data['x'][mask]
    yr = i3_data['y'][mask]
    ht = i3_data['h'][mask]

    # Remove localizations that are too close to each other.
    in_peaks = numpy.zeros((xr.size,util_c.getNResultsPar()))
    in_peaks[:,util_c.getXCenterIndex()] = xr
    in_peaks[:,util_c.getYCenterIndex()] = yr
    in_peaks[:,util_c.getHeightIndex()] = ht

    out_peaks = util_c.removeNeighbors(in_peaks, aoi_size)

    print(curf, in_peaks.shape, out_peaks.shape)

    # Use remaining localizations to calculate spline.
    image = dax_data.loadAFrame(curf-1).astype(numpy.float64)

    xr = out_peaks[:,util_c.getXCenterIndex()]
    yr = out_peaks[:,util_c.getYCenterIndex()]
    ht = out_peaks[:,util_c.getHeightIndex()]

    for i in range(xr.size):
        xf = xr[i]
        yf = yr[i]
        xi = int(xf)
        yi = int(yf)
Example #7
def measurePSF(movie_name, zfile_name, movie_mlist, psf_name, want2d = False, aoi_size = 12, z_range = 750.0, z_step = 50.0):
    """
    The actual z range is 2x z_range (i.e. from -z_range to z_range).
    """
    
    # Load dax file, z offset file and molecule list file.
    dax_data = datareader.inferReader(movie_name)
    z_offsets = None
    if os.path.exists(zfile_name):
        try:
            z_offsets = numpy.loadtxt(zfile_name, ndmin = 2)[:,1]
        except IndexError:
            z_offsets = None
            print("z offsets were not loaded.")
    i3_data = readinsight3.loadI3File(movie_mlist)

    if want2d:
        print("Measuring 2D PSF")
    else:
        print("Measuring 3D PSF")

    #
    # Go through the frames identifying good peaks and adding them
    # to the average psf. For 3D molecule z positions are rounded to 
    # the nearest 50nm.
    #
    z_mid = int(z_range/z_step)
    max_z = 2 * z_mid + 1

    average_psf = numpy.zeros((max_z,4*aoi_size,4*aoi_size))
    peaks_used = 0
    totals = numpy.zeros(max_z)
    [dax_x, dax_y, dax_l] = dax_data.filmSize()
    for curf in range(dax_l):

        # Select localizations in current frame & not near the edges.
        mask = (i3_data['fr'] == curf+1) & (i3_data['x'] > aoi_size) & (i3_data['x'] < (dax_x - aoi_size - 1)) & (i3_data['y'] > aoi_size) & (i3_data['y'] < (dax_y - aoi_size - 1))
        xr = i3_data['x'][mask]
        yr = i3_data['y'][mask]

        # Use the z offset file if it was specified, otherwise use localization z positions.
        if z_offsets is None:
            if (curf == 0):
                print("Using fit z locations.")
            zr = i3_data['z'][mask]
        else:
            if (curf == 0):
                print("Using z offset file.")
            zr = numpy.ones(xr.size) * z_offsets[curf]

        ht = i3_data['h'][mask]

        # Remove localizations that are too close to each other.
        in_peaks = numpy.zeros((xr.size,util_c.getNPeakPar()))
        in_peaks[:,util_c.getXCenterIndex()] = xr
        in_peaks[:,util_c.getYCenterIndex()] = yr
        in_peaks[:,util_c.getZCenterIndex()] = zr
        in_peaks[:,util_c.getHeightIndex()] = ht

        out_peaks = util_c.removeNeighbors(in_peaks, 2*aoi_size)
        #out_peaks = util_c.removeNeighbors(in_peaks, aoi_size)

        print(curf, "peaks in", in_peaks.shape[0], ", peaks out", out_peaks.shape[0])

        # Use remaining localizations to calculate spline.
        image = dax_data.loadAFrame(curf).astype(numpy.float64)

        xr = out_peaks[:,util_c.getXCenterIndex()]
        yr = out_peaks[:,util_c.getYCenterIndex()]
        zr = out_peaks[:,util_c.getZCenterIndex()]
        ht = out_peaks[:,util_c.getHeightIndex()]

        for i in range(xr.size):
            xf = xr[i]
            yf = yr[i]
            zf = zr[i]
            xi = int(xf)
            yi = int(yf)
            if want2d:
                zi = 0
            else:
                zi = int(round(zf/z_step) + z_mid)

            # check the z is in range
            if (zi > -1) and (zi < max_z):

                # get localization image
                mat = image[xi-aoi_size:xi+aoi_size,
                            yi-aoi_size:yi+aoi_size]

                # zoom in by 2x
                psf = scipy.ndimage.zoom(mat, 2.0)

                # re-center image
                psf = scipy.ndimage.shift(psf, (-2.0*(xf-xi), -2.0*(yf-yi)), mode='nearest')

                # add to average psf accumulator
                average_psf[zi,:,:] += psf
                totals[zi] += 1

    # Force PSF to be zero (on average) at the boundaries.
    for i in range(max_z):
        edge = numpy.concatenate((average_psf[i,0,:],
                                  average_psf[i,-1,:],
                                  average_psf[i,:,0],
                                  average_psf[i,:,-1]))
        average_psf[i,:,:] -= numpy.mean(edge)

    # Normalize the PSF.
    if want2d:
        max_z = 1

    for i in range(max_z):
        print(i, totals[i])
        if (totals[i] > 0.0):
            average_psf[i,:,:] = average_psf[i,:,:]/numpy.sum(numpy.abs(average_psf[i,:,:]))

    average_psf = average_psf/numpy.max(average_psf)

    # Save PSF (in image form).
    if True:
        import storm_analysis.sa_library.daxwriter as daxwriter
        dxw = daxwriter.DaxWriter(os.path.join(os.path.dirname(psf_name), "psf.dax"),
                                  average_psf.shape[1],
                                  average_psf.shape[2])
        for i in range(max_z):
            dxw.addFrame(1000.0 * average_psf[i,:,:] + 100)
        dxw.close()

    # Save PSF.
    if want2d:
        psf_dict = {"psf" : average_psf[0,:,:],
                    "type" : "2D"}

    else:
        cur_z = -z_range
        z_vals = []
        for i in range(max_z):
            z_vals.append(cur_z)
            cur_z += z_step

        psf_dict = {"psf" : average_psf,
                    "pixel_size" : 0.080, # 1/2 the camera pixel size in nm.
                    "type" : "3D",
                    "zmin" : -z_range,
                    "zmax" : z_range,
                    "zvals" : z_vals}

    with open(psf_name, 'wb') as fp:
        pickle.dump(psf_dict, fp)
Example #8
def measurePSF():
    
    # Create sparse random localizations for PSF measurement.
    #
    print("Creating random localization.")
    sim_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/simulator/"
    subprocess.call(["python", sim_path + "emitters_uniform_random.py",
                     "--bin", "sparse_random.hdf5",
                     "--density", "0.0002",
                     "--margin", str(settings.margin),
                     "--sx", str(settings.x_size),
                     "--sy", str(settings.y_size)])

    # Create sparser grid for PSF measurement.
    #
    print("Creating data for PSF measurement.")
    sim_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/simulator/"
    subprocess.call(["python", sim_path + "emitters_on_grid.py",
                     "--bin", "sparse_grid.hdf5",
                     "--nx", "8",
                     "--ny", "3",
                     "--spacing", "40"])

    # Create text files for PSF measurement.
    #
    locs = saH5Py.loadLocalizations("sparse_random.hdf5")
    [xf, yf] = iaUtilsC.removeNeighbors(locs["x"], locs["y"], 2.0 * ((settings.psf_size/2)+1))
    numpy.savetxt("sparse_random.txt", numpy.transpose(numpy.vstack((xf, yf))))
    
    locs = saH5Py.loadLocalizations("sparse_grid.hdf5")
    numpy.savetxt("sparse_grid.txt", numpy.transpose(numpy.vstack((locs['x'], locs['y']))))

    # Create drift file, this is used to displace the localizations in the
    # PSF measurement movie.
    #
    dz = numpy.arange(-settings.psf_z_range, settings.psf_z_range + 0.001, 0.010)
    drift_data = numpy.zeros((dz.size, 3))
    drift_data[:,2] = dz
    numpy.savetxt("drift.txt", drift_data)

    # Also create the z-offset file.
    #
    z_offset = numpy.ones((dz.size, 2))
    z_offset[:,1] = dz
    numpy.savetxt("z_offset.txt", z_offset)
    
    z_offset[:,0] = 0
    numpy.savetxt("z_offset_none_valid.txt", z_offset)
    
    # Create simulated data for PSF measurement.
    #
    bg_f = lambda s, x, y, i3 : background.UniformBackground(s, x, y, i3, photons = 10)
    cam_f = lambda s, x, y, i3 : camera.Ideal(s, x, y, i3, 100.)
    drift_f = lambda s, x, y, i3 : drift.DriftFromFile(s, x, y, i3, "drift.txt")
    pp_f = lambda s, x, y, i3 : photophysics.AlwaysOn(s, x, y, i3, 20000.0)
    psf_f = lambda s, x, y, i3 : psf.PupilFunction(s, x, y, i3, 100.0, settings.zmn)
    
    sim = simulate.Simulate(background_factory = bg_f,
                            camera_factory = cam_f,
                            drift_factory = drift_f,
                            photophysics_factory = pp_f,
                            psf_factory = psf_f,
                            x_size = settings.x_size,
                            y_size = settings.y_size)

    if True:
        sim.simulate("sparse_grid.tif", "sparse_grid.hdf5", dz.size)
        sim.simulate("sparse_random.tif", "sparse_random.hdf5", dz.size)

    # Measure the PSF using spliner/measure_psf_beads.py and multiplane/measure_psf.py
    #

    diff_detected = False

    # Grid.
    if True:

        # Remove old results.
        for elt in ["sparse_grid_beads.psf", "sparse_grid_hdf5_zo.psf",
                    "sparse_grid_hdf5.psf", "sparse_grid_hdf5_mp_zo.psf"]:
            if os.path.exists(elt):
                os.remove(elt)
        
        print("Measuring PSF (beads).")
        spliner_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/spliner/"
        subprocess.call(["python", spliner_path + "measure_psf_beads.py",
                         "--movie", "sparse_grid.tif",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--beads", "sparse_grid.txt",
                         "--psf", "sparse_grid_beads.psf",
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5, with zoffset).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_grid.tif",
                         "--bin", "sparse_grid_ref.hdf5",
                         "--psf", "sparse_grid_hdf5_zo.psf",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_grid.tif",
                         "--bin", "sparse_grid_ref.hdf5",
                         "--psf", "sparse_grid_hdf5.psf",
                         "--zoffset", "",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        multiplane_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/multi_plane/"
        print("Measure PSF (multiplane).")
        subprocess.call(["python", multiplane_path + "psf_zstack.py",
                         "--movie", "sparse_grid.tif",
                         "--bin", "sparse_grid.hdf5",
                         "--zstack", "sparse_grid_zstack",
                         "--aoi_size", str(int(settings.psf_size/2)+1)])

        subprocess.call(["python", multiplane_path + "measure_psf.py",
                         "--zstack", "sparse_grid_zstack.npy",
                         "--zoffsets", "z_offset.txt",
                         "--psf_name", "sparse_grid_hdf5_mp_zo.psf",
                         "--z_range", str(settings.psf_z_range),
                         "--z_step", str(settings.psf_z_step),
                         "--normalize"])

        # Check that the PSFs are the same.
        psf_beads = numpy.load("sparse_grid_beads.psf", allow_pickle = True)["psf"]
        psf_hdf5_zo = numpy.load("sparse_grid_hdf5_zo.psf", allow_pickle = True)["psf"]
        psf_hdf5 = numpy.load("sparse_grid_hdf5.psf", allow_pickle = True)["psf"]
        psf_hdf5_mp_zo = numpy.load("sparse_grid_hdf5_mp_zo.psf", allow_pickle = True)["psf"]

        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_zo)
        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5)

        # Here we are only checking they are close.
        if (settings.psf_size >= 20):
            diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_mp_zo, atol = 0.17, rtol = 0.17)

    # Grid, no valid z offsets. These are supposed to fail.
    #
    if True:
        print("Measuring PSF (beads).")
        spliner_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/spliner/"
        try:
            subprocess.check_output(["python", spliner_path + "measure_psf_beads.py",
                                     "--movie", "sparse_grid.tif",
                                     "--zoffset", "z_offset_none_valid.txt",
                                     "--aoi_size", str(int(settings.psf_size/2)+1),
                                     "--beads", "sparse_grid.txt",
                                     "--psf", "sparse_grid_beads.psf",
                                     "--zrange", str(settings.psf_z_range),
                                     "--zstep", str(settings.psf_z_step)])
        except subprocess.CalledProcessError:
            pass
        else:
            assert False, "spliner.measure_psf_beads did not fail!"

        print("Measuring PSF (HDF5, with zoffset).")
        try:
            subprocess.check_output(["python", spliner_path + "measure_psf.py",
                                     "--movie", "sparse_grid.tif",
                                     "--bin", "sparse_grid_ref.hdf5",
                                     "--psf", "sparse_grid_hdf5_zo.psf",
                                     "--zoffset", "z_offset_none_valid.txt",
                                     "--aoi_size", str(int(settings.psf_size/2)+1),
                                     "--zrange", str(settings.psf_z_range),
                                     "--zstep", str(settings.psf_z_step)])
        except subprocess.CalledProcessError:
            pass
        else:
            assert False, "spliner.measure_psf did not fail!"            

        multiplane_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/multi_plane/"
        print("Measure PSF (multiplane).")
        try:
            subprocess.check_output(["python", multiplane_path + "psf_zstack.py",
                                    "--movie", "sparse_grid.tif",
                                     "--bin", "sparse_grid.hdf5",
                                     "--zstack", "sparse_grid_zstack",
                                     "--aoi_size", str(int(settings.psf_size/2)+1)])

            subprocess.check_output(["python", multiplane_path + "measure_psf.py",
                                     "--zstack", "sparse_grid_zstack.npy",
                                     "--zoffsets", "z_offset_none_valid.txt",
                                     "--psf_name", "sparse_grid_hdf5_mp_zo.psf",
                                     "--z_range", str(settings.psf_z_range),
                                     "--z_step", str(settings.psf_z_step),
                                     "--normalize"])
        except subprocess.CalledProcessError:
            pass
        else:
            assert False, "multiplane PSF measurement did not fail!"

    # Random.
    if True:

        # Remove old results.
        for elt in ["sparse_random_beads.psf", "sparse_random_hdf5_zo.psf", "sparse_random_hdf5.psf"]:
            if os.path.exists(elt):
                os.remove(elt)
        
        print("Measuring PSF (beads).")
        spliner_path = os.path.dirname(inspect.getfile(storm_analysis)) + "/spliner/"
        subprocess.call(["python", spliner_path + "measure_psf_beads.py",
                         "--movie", "sparse_random.tif",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--beads", "sparse_random.txt",
                         "--psf", "sparse_random_beads.psf",
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5, with zoffset).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_random.tif",
                         "--bin", "sparse_random_ref.hdf5",
                         "--psf", "sparse_random_hdf5_zo.psf",
                         "--zoffset", "z_offset.txt",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])

        print("Measuring PSF (HDF5).")
        subprocess.call(["python", spliner_path + "measure_psf.py",
                         "--movie", "sparse_random.tif",
                         "--bin", "sparse_random_ref.hdf5",
                         "--psf", "sparse_random_hdf5.psf",
                         "--zoffset", "",
                         "--aoi_size", str(int(settings.psf_size/2)+1),
                         "--zrange", str(settings.psf_z_range),
                         "--zstep", str(settings.psf_z_step)])    

        psf_beads = numpy.load("sparse_random_beads.psf", allow_pickle = True)["psf"]
        psf_hdf5_zo = numpy.load("sparse_random_hdf5_zo.psf", allow_pickle = True)["psf"]
        psf_hdf5 = numpy.load("sparse_random_hdf5.psf", allow_pickle = True)["psf"]

        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_zo)
        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5)
    
    if diff_detected:
        print("Difference detected in PSF measurements!")
    else:
        print("No differences detected, all good.")

    if False:
        with tifffile.TiffWriter("psf_diff.tif") as tf:
            for i in range(psf_beads.shape[0]):
                tf.save((psf_beads[i,:,:] - psf_hdf5_zo[i,:,:]).astype(numpy.float32))
Example #9
def measurePSF():

    # Create sparse random localizations for PSF measurement.
    #
    print("Creating random localization.")
    emittersUniformRandom.emittersUniformRandom("sparse_random.hdf5", 0.0002,
                                                settings.margin,
                                                settings.x_size,
                                                settings.y_size, 0.0)

    # Create sparser grid for PSF measurement.
    #
    print("Creating data for PSF measurement.")
    emittersOnGrid.emittersOnGrid("sparse_grid.hdf5", 8, 3, 1.5, 40, 0.0, 0.0)

    # Create text files for PSF measurement.
    #
    locs = saH5Py.loadLocalizations("sparse_random.hdf5")
    [xf, yf] = iaUtilsC.removeNeighbors(locs["x"], locs["y"],
                                        2.0 * ((settings.psf_size / 2) + 1))
    numpy.savetxt("sparse_random.txt", numpy.transpose(numpy.vstack((xf, yf))))

    locs = saH5Py.loadLocalizations("sparse_grid.hdf5")
    numpy.savetxt("sparse_grid.txt",
                  numpy.transpose(numpy.vstack((locs['x'], locs['y']))))

    # Create drift file, this is used to displace the localizations in the
    # PSF measurement movie.
    #
    dz = numpy.arange(-settings.psf_z_range, settings.psf_z_range + 0.001,
                      0.010)
    drift_data = numpy.zeros((dz.size, 3))
    drift_data[:, 2] = dz
    numpy.savetxt("drift.txt", drift_data)

    # Also create the z-offset file.
    #
    z_offset = numpy.ones((dz.size, 2))
    z_offset[:, 1] = dz
    numpy.savetxt("z_offset.txt", z_offset)

    z_offset[:, 0] = 0
    numpy.savetxt("z_offset_none_valid.txt", z_offset)

    # Create simulated data for PSF measurement.
    #
    bg_f = lambda s, x, y, i3: background.UniformBackground(
        s, x, y, i3, photons=10)
    cam_f = lambda s, x, y, i3: camera.Ideal(s, x, y, i3, 100.)
    drift_f = lambda s, x, y, i3: drift.DriftFromFile(s, x, y, i3, "drift.txt")
    pp_f = lambda s, x, y, i3: photophysics.AlwaysOn(s, x, y, i3, 20000.0)
    psf_f = lambda s, x, y, i3: psf.PupilFunction(s, x, y, i3, 100.0,
                                                  settings.zmn)

    sim = simulate.Simulate(background_factory=bg_f,
                            camera_factory=cam_f,
                            drift_factory=drift_f,
                            photophysics_factory=pp_f,
                            psf_factory=psf_f,
                            x_size=settings.x_size,
                            y_size=settings.y_size)

    if True:
        sim.simulate("sparse_grid.tif", "sparse_grid.hdf5", dz.size)
        sim.simulate("sparse_random.tif", "sparse_random.hdf5", dz.size)

    # Measure the PSF using spliner/measure_psf_beads.py and multiplane/measure_psf.py
    #

    diff_detected = False

    # Grid.
    if True:

        # Remove old results.
        for elt in [
                "sparse_grid_beads.psf", "sparse_grid_hdf5_zo.psf",
                "sparse_grid_hdf5.psf", "sparse_grid_hdf5_mp_zo.psf"
        ]:
            if os.path.exists(elt):
                os.remove(elt)

        print("Measuring PSF (beads).")
        measurePSFBeads.measurePSFBeads("sparse_grid.tif",
                                        "z_offset.txt",
                                        "sparse_grid.txt",
                                        "sparse_grid_beads.psf",
                                        aoi_size=int(settings.psf_size / 2 + 1),
                                        z_range=settings.psf_z_range,
                                        z_step=settings.psf_z_step)

        print("Measuring PSF (HDF5, with zoffset).")
        spMeasurePSF.measurePSF("sparse_grid.tif",
                                "z_offset.txt",
                                "sparse_grid_ref.hdf5",
                                "sparse_grid_hdf5_zo.psf",
                                aoi_size=int(settings.psf_size / 2 + 1),
                                z_range=settings.psf_z_range,
                                z_step=settings.psf_z_step)

        print("Measuring PSF (HDF5).")
        spMeasurePSF.measurePSF("sparse_grid.tif",
                                "",
                                "sparse_grid_ref.hdf5",
                                "sparse_grid_hdf5.psf",
                                aoi_size=int(settings.psf_size / 2 + 1),
                                z_range=settings.psf_z_range,
                                z_step=settings.psf_z_step)

        multiplane_path = os.path.dirname(
            inspect.getfile(storm_analysis)) + "/multi_plane/"
        print("Measure PSF (multiplane).")
        psfZStack.psfZStack("sparse_grid.tif",
                            "sparse_grid.hdf5",
                            "sparse_grid_zstack",
                            aoi_size=int(settings.psf_size / 2 + 1))

        mpMeasurePSF.measurePSF("sparse_grid_zstack.npy",
                                "z_offset.txt",
                                "sparse_grid_hdf5_mp_zo.psf",
                                z_range=settings.psf_z_range,
                                z_step=settings.psf_z_step,
                                normalize=True)

        # Check that the PSFs are the same.
        psf_beads = numpy.load("sparse_grid_beads.psf",
                               allow_pickle=True)["psf"]
        psf_hdf5_zo = numpy.load("sparse_grid_hdf5_zo.psf",
                                 allow_pickle=True)["psf"]
        psf_hdf5 = numpy.load("sparse_grid_hdf5.psf", allow_pickle=True)["psf"]
        psf_hdf5_mp_zo = numpy.load("sparse_grid_hdf5_mp_zo.psf",
                                    allow_pickle=True)["psf"]

        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_zo)
        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5)

        # Here we are only checking they are close.
        if (settings.psf_size >= 20):
            diff_detected = diff_detected or psfDiffCheck(
                psf_beads, psf_hdf5_mp_zo, atol=0.17, rtol=0.17)

    # Grid, no valid z offsets. These are supposed to fail.
    #
    if True:
        print("Measuring PSF (beads).")
        try:
            measurePSFBeads.measurePSFBeads(
                "sparse_grid.tif",
                "z_offset_none_valid.txt",
                "sparse_grid.txt",
                "sparse_grid_beads.psf",
                aoi_size=int(settings.psf_size / 2 + 1),
                z_range=settings.psf_z_range,
                z_step=settings.psf_z_step)
        except AssertionError:
            pass
        else:
            assert False, "spliner.measure_psf_beads did not fail!"

        print("Measuring PSF (HDF5, with zoffset).")
        try:
            spMeasurePSF.measurePSF("sparse_grid.tif",
                                    "z_offset_none_valid.txt",
                                    "sparse_grid_ref.hdf5",
                                    "sparse_grid_hdf5_zo.psf",
                                    aoi_size=int(settings.psf_size / 2 + 1),
                                    z_range=settings.psf_z_range,
                                    z_step=settings.psf_z_step)
        except AssertionError:
            pass
        else:
            assert False, "spliner.measure_psf did not fail!"

        print("Measure PSF (multiplane).")
        try:
            psfZStack.psfZStack("sparse_grid.tif",
                                "sparse_grid.hdf5",
                                "sparse_grid_zstack",
                                aoi_size=int(settings.psf_size / 2 + 1))

            mpMeasurePSF.measurePSF("sparse_grid_zstack.npy",
                                    "z_offset_none_valid.txt",
                                    "sparse_grid_hdf5_mp_zo.psf",
                                    z_range=settings.psf_z_range,
                                    z_step=settings.psf_z_step,
                                    normalize=True)
        except AssertionError:
            pass
        else:
            assert False, "multiplane PSF measurement did not fail!"

    # Random.
    if True:

        # Remove old results.
        for elt in [
                "sparse_random_beads.psf", "sparse_random_hdf5_zo.psf",
                "sparse_random_hdf5.psf"
        ]:
            if os.path.exists(elt):
                os.remove(elt)

        print("Measuring PSF (beads).")
        measurePSFBeads.measurePSFBeads("sparse_random.tif",
                                        "z_offset.txt",
                                        "sparse_random.txt",
                                        "sparse_random_beads.psf",
                                        aoi_size=int(settings.psf_size / 2 + 1),
                                        z_range=settings.psf_z_range,
                                        z_step=settings.psf_z_step)

        print("Measuring PSF (HDF5, with zoffset).")
        spMeasurePSF.measurePSF("sparse_random.tif",
                                "z_offset.txt",
                                "sparse_random_ref.hdf5",
                                "sparse_random_hdf5_zo.psf",
                                aoi_size=int(settings.psf_size / 2 + 1),
                                z_range=settings.psf_z_range,
                                z_step=settings.psf_z_step)

        print("Measuring PSF (HDF5).")
        spMeasurePSF.measurePSF("sparse_random.tif",
                                "",
                                "sparse_random_ref.hdf5",
                                "sparse_random_hdf5.psf",
                                aoi_size=int(settings.psf_size / 2 + 1),
                                z_range=settings.psf_z_range,
                                z_step=settings.psf_z_step)

        psf_beads = numpy.load("sparse_random_beads.psf",
                               allow_pickle=True)["psf"]
        psf_hdf5_zo = numpy.load("sparse_random_hdf5_zo.psf",
                                 allow_pickle=True)["psf"]
        psf_hdf5 = numpy.load("sparse_random_hdf5.psf",
                              allow_pickle=True)["psf"]

        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5_zo)
        diff_detected = diff_detected or psfDiffCheck(psf_beads, psf_hdf5)

    if diff_detected:
        print("Difference detected in PSF measurements!")
    else:
        print("No differences detected, all good.")

    if False:
        with tifffile.TiffWriter("psf_diff.tif") as tf:
            for i in range(psf_beads.shape[0]):
                tf.save((psf_beads[i, :, :] - psf_hdf5_zo[i, :, :]).astype(
                    numpy.float32))
def psfLocalizations(h5_filename, mapping_filename, frame = 0, aoi_size = 8, min_height = 0.0):

    # Load localizations & movie size.
    with saH5Py.SAH5Py(h5_filename) as h5:
        locs = h5.getLocalizationsInFrame(frame)
        assert bool(locs), "No localizations found in frame " + str(frame)
        [movie_x, movie_y] = h5.getMovieInformation()[:2]

    # Load mapping.
    mappings = {}
    if os.path.exists(mapping_filename):
        with open(mapping_filename, 'rb') as fp:
            mappings = pickle.load(fp)
    else:
        print("Mapping file not found, single channel data?")

    # Remove localizations that are too dim.
    mask = (locs["height"] > min_height)

    locs_mask = {}
    for elt in ["x", "y"]:
        locs_mask[elt] = locs[elt][mask]
    
    # Remove localizations that are too close to each other.
    [xf, yf] = iaUtilsC.removeNeighbors(locs_mask["x"], locs_mask["y"], 2.0 * aoi_size)

    # Remove localizations that are too close to the edge or
    # outside of the image in any of the channels.
    #
    is_good = numpy.ones(xf.size, dtype = bool)
    for i in range(xf.size):
        
        # Check in Channel 0.
        if (xf[i] < aoi_size) or (xf[i] + aoi_size >= movie_x):
            is_good[i] = False
            continue
        
        if (yf[i] < aoi_size) or (yf[i] + aoi_size >= movie_y):
            is_good[i] = False
            continue

        # Check other channels.
        for key in mappings:
            if not is_good[i]:
                break
            
            coeffs = mappings[key]
            [ch1, ch2, axis] = key.split("_")
            if (ch1 == "0"):

                if (axis == "x"):
                    xm = coeffs[0] + coeffs[1]*xf[i] + coeffs[2]*yf[i]
                    if (xm < aoi_size) or (xm + aoi_size >= movie_x):
                        is_good[i] = False
                        break

                elif (axis == "y"):
                    ym = coeffs[0] + coeffs[1]*xf[i] + coeffs[2]*yf[i]
                    if (ym < aoi_size) or (ym + aoi_size >= movie_y):
                        is_good[i] = False
                        break

    #
    # Save localizations for each channel.
    #
    gx = xf[is_good]
    gy = yf[is_good]

    basename = os.path.splitext(h5_filename)[0]
    saH5Py.saveLocalizations(basename + "_c1_psf.hdf5", {"x" : gx, "y" : gy})
    
    index = 1
    while ("0_" + str(index) + "_x" in mappings):
        cx = mappings["0_" + str(index) + "_x"]
        cy = mappings["0_" + str(index) + "_y"]
        xm = cx[0] + cx[1] * gx + cx[2] * gy
        ym = cy[0] + cy[1] * gx + cy[2] * gy

        saH5Py.saveLocalizations(basename + "_c" + str(index+1) + "_psf.hdf5", {"x" : xm, "y" : ym})
        
        index += 1

    #
    # Print localizations that were kept.
    #
    print(gx.size, "localizations were kept out of", xf.size)
    for i in range(gx.size):
        print("ch0: {0:.2f} {1:.2f}".format(gx[i], gy[i]))
        index = 1
        while ("0_" + str(index) + "_x" in mappings):
            cx = mappings["0_" + str(index) + "_x"]
            cy = mappings["0_" + str(index) + "_y"]
            xm = cx[0] + cx[1] * gx[i] + cx[2] * gy[i]
            ym = cy[0] + cy[1] * gx[i] + cy[2] * gy[i]
            print("ch" + str(index) + ": {0:.2f} {1:.2f}".format(xm, ym))
            index += 1
        print("")
    print("")