def rollingBallSub(movie_in, movie_out, radius, sigma, offset=100):

    input_movie = datareader.inferReader(movie_in)
    output_dax = daxwriter.DaxWriter(movie_out, 0, 0)

    rb = RollingBall(radius, sigma)

    for i in range(input_movie.filmSize()[2]):

        if ((i % 10) == 0):
            print("Processing frame", i)

        image = input_movie.loadAFrame(i) - offset

        # An alternative to the rolling ball is simple gaussian low pass
        # filtering:
        #
        #   image = image.astype(numpy.float64)
        #   lowpass = scipy.ndimage.gaussian_filter(image, sigma)
        #   sub = image - lowpass
        #
        sub = rb.removeBG(image)

        output_dax.addFrame(sub + offset)

    output_dax.close()
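
A hypothetical driver for rollingBallSub(), assuming the datareader, daxwriter and RollingBall definitions used above are in scope; the file names and parameter values are placeholders:

# Hypothetical usage; file names and parameters are placeholders.
rollingBallSub("movie_in.dax", "movie_rb_sub.dax", 10.0, 1.0)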
Example #2
    def analyzeImage(self,
                     new_image,
                     bg_estimate=None,
                     save_residual=False,
                     verbose=False):
        """
        image - The image to analyze.
        bg_estimate - (Optional) An estimate of the background.
        save_residual - (Optional) Save the residual image after peak fitting, default is False.

        return - [Found peaks, Image residual]
        """

        #
        # Pad out arrays so that we can better analyze localizations
        # near the edge of the original image.
        #
        image = padArray(new_image, self.margin)
        residual = padArray(new_image, self.margin)
        if bg_estimate is not None:
            bg_estimate = padArray(bg_estimate, self.margin)

        self.peak_finder.newImage(image)
        self.peak_fitter.newImage(image)

        if save_residual:
            resid_dax = daxwriter.DaxWriter("residual.dax", residual.shape[0],
                                            residual.shape[1])

        peaks = False
        for i in range(self.peak_finder.iterations):
            if save_residual:
                resid_dax.addFrame(residual)

            no_bg_image = self.peak_finder.subtractBackground(residual, bg_estimate)
            [found_new_peaks, peaks] = self.peak_finder.findPeaks(no_bg_image, peaks)
            if isinstance(peaks, numpy.ndarray):
                [peaks, residual] = self.peak_fitter.fitPeaks(peaks)

            if verbose:
                if isinstance(peaks, numpy.ndarray):
                    print(" peaks:", i, found_new_peaks, peaks.shape[0])
                else:
                    print(" peaks:", i, found_new_peaks, "NA")

            if not found_new_peaks:
                break

        if save_residual:
            resid_dax.addFrame(residual)
            resid_dax.close()

        if isinstance(peaks, numpy.ndarray):
            peaks[:, utilC.getXCenterIndex()] -= float(self.margin)
            peaks[:, utilC.getYCenterIndex()] -= float(self.margin)

        return [peaks, residual]
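
padArray() is referenced above but not shown. A minimal sketch of what it is assumed to do, namely pad the image by margin pixels on every side so that localizations near the original edges get a full analysis window (the real helper may pad differently):

import numpy

def padArray(image, margin):
    # Pad by 'margin' pixels on each side, repeating the edge values.
    return numpy.pad(image, margin, mode='edge')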
Example #3
    def __init__(self,
                 image_size,
                 spline_file,
                 number_zvals,
                 timestep,
                 upsample=1):
        self.background = numpy.zeros(image_size)
        self.psf_heights = []
        self.upsample = int(upsample)

        im_size_x, im_size_y = image_size
        size_x = im_size_x * self.upsample
        size_y = im_size_y * self.upsample

        s_to_psf = splineToPSF.SplineToPSF(spline_file)
        self.spline_size_x = self.spline_size_y = s_to_psf.getSize()

        # Calculate z values to use.
        self.z_min = s_to_psf.getZMin()
        self.z_max = s_to_psf.getZMax()
        z_step = (self.z_max - self.z_min) / float(number_zvals - 1.0)
        self.zvals = []
        for i in range(number_zvals):
            self.zvals.append(self.z_min + float(i) * z_step)

        psfs = numpy.zeros((size_x, size_y, len(self.zvals)))

        # Add PSFs.
        for i in range(len(self.zvals)):
            psfs[:, :, i] = s_to_psf.getPSF(self.zvals[i],
                                            shape=(im_size_x, im_size_y),
                                            up_sample=upsample,
                                            normalize=True)
            self.psf_heights.append(numpy.max(psfs[:, :, i]))
            #print "fista_decon", i, numpy.max(psfs[:,:,i])

        # Check PSFs.
        if True:
            import storm_analysis.sa_library.daxwriter as daxwriter

            psf_data = daxwriter.DaxWriter("fista_decon_psf.dax",
                                           psfs.shape[0], psfs.shape[1])
            for i in range(psfs.shape[2]):
                temp = psfs[:, :, i]
                psf_data.addFrame(1000.0 * temp / numpy.max(temp))
            psf_data.close()

        if False:
            # Python solver (useful for debugging).
            print("Using Python solver.")
            self.fsolver = fista_3d.FISTA(psfs, timestep)
        else:
            # C solver (about 4x faster).
            print("Using C solver.")
            self.fsolver = fistaFFTC.FISTA(psfs, timestep)
Example #4
def saveAsDax(file_name, A, measured_pixels):
    import storm_analysis.sa_library.daxwriter as daxwriter

    dx = daxwriter.DaxWriter(file_name, 0, 0)
    ncols = A.shape[1]
    for i in range(ncols):
        x = numpy.zeros(ncols)
        x[i] = 1.0
        b = numpy.dot(A, x)
        b = b.reshape(measured_pixels, measured_pixels)
        dx.addFrame(10000.0 * b)

    dx.close()
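
Each output frame is one column of A reshaped to the measured image, which makes this handy for inspecting a system matrix visually. A hypothetical call, noting that A.shape[0] must equal measured_pixels squared:

# Hypothetical usage; A is a made-up random matrix mapping 32 unknowns
# to a 16 x 16 measured image (256 rows).
A = numpy.random.uniform(size = (16 * 16, 32))
saveAsDax("a_matrix.dax", A, 16)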
Example #5
def waveletBGRSub(movie_in,
                  movie_out,
                  wavelet_type,
                  wavelet_level,
                  iterations,
                  threshold,
                  offset=100):

    input_movie = datareader.inferReader(movie_in)
    output_dax = daxwriter.DaxWriter(movie_out, 0, 0)

    wbgr = WaveletBGR(wavelet_type=wavelet_type)

    for i in range(input_movie.filmSize()[2]):

        if ((i % 10) == 0):
            print("Processing frame", i)

        image = input_movie.loadAFrame(i) - offset
        sub = wbgr.removeBG(image, iterations, threshold, wavelet_level)
        output_dax.addFrame(sub + offset)

    output_dax.close()
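
A hypothetical call, mirroring the command line usage in Example #7 below; "db4" is just an assumed wavelet type:

# Hypothetical usage; file names and parameters are placeholders.
waveletBGRSub("movie_in.dax", "movie_wbgr_sub.dax", "db4", 2, 10, 1.0)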
Example #6
import numpy
import sys

import storm_analysis.sa_library.datareader as datareader
import storm_analysis.sa_library.daxwriter as daxwriter

import mlem_c as mlem

# defaults
scale = 8
iters = 500

# user defined
input_movie = datareader.SPEReader(sys.argv[1])

[x_size, y_size, frames] = input_movie.filmSize()

if (x_size != y_size):
    print "Movies must be square.."
    exit()

output_movie = daxwriter.DaxWriter(sys.argv[2], x_size*scale, y_size*scale)
camera_offset = float(sys.argv[3])
sigma = float(sys.argv[4])
compression = float(sys.argv[5])

mlemd = mlem.Fitter(numpy.zeros((x_size,y_size)),
                    sigma,
                    scale,
                    0.0)

# process the film
for i in range(frames):
    print "Processing:", i
    
    # load image
    image = input_movie.loadAFrame(i) - camera_offset
Example #7
if (__name__ == "__main__"):

    import sys

    import storm_analysis.sa_library.datareader as datareader
    import storm_analysis.sa_library.daxwriter as daxwriter

    if (len(sys.argv) < 7):
        print(
            "usage <movie (in)> <subtracted movie (out)> <wavelet_type> <wavelet_level> <iterations> <threshold> <baseline (optional, 100 default)>"
        )
        exit()

    input_movie = datareader.inferReader(sys.argv[1])
    output_dax = daxwriter.DaxWriter(sys.argv[2], 0, 0)

    iterations = int(sys.argv[5])
    threshold = float(sys.argv[6])
    wavelet_level = int(sys.argv[4])

    offset = 100.0
    if (len(sys.argv) == 8):
        offset = float(sys.argv[7])

    wbgr = WaveletBGR(wavelet_type=sys.argv[3])

    for i in range(input_movie.filmSize()[2]):

        if ((i % 10) == 0):
            print("Processing frame", i)
Example #8
if (__name__ == "__main__"):

    import sys

    import storm_analysis.sa_library.datareader as datareader
    import storm_analysis.sa_library.daxwriter as daxwriter

    if (len(sys.argv) != 4):
        print("usage: <input movie> <output movie> <number of frames>")
        exit()

    input_movie = datareader.inferReader(sys.argv[1])
    [w, h, l] = input_movie.filmSize()

    output_movie = daxwriter.DaxWriter(sys.argv[2], w, h)

    n_frames = int(sys.argv[3])
    if (n_frames > l):
        n_frames = l

    sbge = StaticBGEstimator(input_movie)
    for i in range(n_frames):
        diff = input_movie.loadAFrame(i) - sbge.estimateBG(i) + 100
        output_movie.addFrame(diff)

    output_movie.close()

Example #9
def measurePSFBeads(movie_name,
                    zfile_name,
                    beads_file,
                    psf_name,
                    want2d=False,
                    aoi_size=12,
                    z_range=600.0,
                    z_step=50.0):

    # Load movie file.
    movie_data = datareader.inferReader(movie_name)

    #
    # Load the z-offset information for the dax file.
    #
    #   This is a text file with one line per frame that contains the
    #   z-offset (in nm) for that frame. Each line is a space separated
    #   valid, z_pos pair. If valid is 0 the frame will be ignored,
    #   otherwise it will be used.
    #
    data = numpy.loadtxt(zfile_name)
    valid = data[:, 0]
    z_off = data[:, 1]

    #
    # Load the locations of the beads.
    #
    #   This is a text file that contains the locations of the beads that
    #   will be used to construct the PSF. Each line is a space separated
    #   x, y pair of bead locations (in pixels).
    #
    #   One way to create this file is to look at the bead movie with
    #   visualizer.py and record the center positions of several beads.
    #
    data = numpy.loadtxt(beads_file, ndmin=2)
    bead_x = data[:, 0]
    bead_y = data[:, 1]

    #
    # Go through the frames and add the bead images to the average psf. Z
    # positions are rounded to the nearest 50nm. You might need to
    # adjust z_range depending on your experiment.
    #
    z_mid = int(z_range / z_step)
    max_z = 2 * z_mid + 1
    average_psf = numpy.zeros((max_z, 4 * aoi_size, 4 * aoi_size))
    totals = numpy.zeros(max_z)
    [dax_x, dax_y, dax_l] = movie_data.filmSize()
    for curf in range(dax_l):

        if ((curf % 50) == 0):
            print("Processing frame:", curf)

        if (abs(valid[curf]) < 1.0e-6):
            #    print "skipping", valid[curf]
            continue

        # Use bead localization to calculate spline.
        image = movie_data.loadAFrame(curf).astype(numpy.float64)

        # Get frame z and check that it is in range.
        zf = z_off[curf]
        zi = int(round(zf / z_step)) + z_mid
        if (zi > -1) and (zi < max_z):

            for i in range(bead_x.size):

                xf = bead_x[i]
                yf = bead_y[i]
                xi = int(xf)
                yi = int(yf)

                # Get localization image.
                mat = image[xi - aoi_size:xi + aoi_size,
                            yi - aoi_size:yi + aoi_size]

                # Zoom in by 2x.
                psf = scipy.ndimage.zoom(mat, 2.0)

                # Re-center image.
                psf = scipy.ndimage.shift(
                    psf, (-2.0 * (xf - xi), -2.0 * (yf - yi)), mode='nearest')

                # Add to average psf accumulator.
                average_psf[zi, :, :] += psf
                totals[zi] += 1

    # Force PSF to be zero (on average) at the boundaries.
    for i in range(max_z):
        edge = numpy.concatenate((average_psf[i, 0, :], average_psf[i, -1, :],
                                  average_psf[i, :, 0], average_psf[i, :, -1]))
        average_psf[i, :, :] -= numpy.mean(edge)

    # Normalize PSF.
    for i in range(max_z):
        if (totals[i] > 0.0):
            average_psf[i, :, :] = average_psf[i, :, :] / numpy.sum(
                numpy.abs(average_psf[i, :, :]))

    average_psf = average_psf / numpy.max(average_psf)

    # Save PSF (in image form).
    if True:
        import os
        import storm_analysis.sa_library.daxwriter as daxwriter
        dxw = daxwriter.DaxWriter(
            os.path.join(os.path.dirname(psf_name), "psf_beads.dax"),
            average_psf.shape[1], average_psf.shape[2])
        for i in range(max_z):
            #print i, numpy.max(average_psf[i,:,:])
            dxw.addFrame(1000.0 * average_psf[i, :, :] + 100)
        dxw.close()

    # Save PSF.
    cur_z = -z_range
    z_vals = []
    for i in range(max_z):
        z_vals.append(cur_z)
        cur_z += z_step

    psf_dict = {
        "psf": average_psf,
        "pixel_size": 0.080,  # 1/2 the camera pixel size in nm.
        "type": "3D",
        "zmin": -z_range,
        "zmax": z_range,
        "zvals": z_vals
    }

    with open(psf_name, 'wb') as fp:
        pickle.dump(psf_dict, fp)
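
The two text files described in the comments above are plain whitespace delimited tables, so they can be written with numpy.savetxt(). A sketch with made-up values:

import numpy

# z offset file: one line per frame, each line a valid flag and a
# z position in nm. Values here are made up.
numpy.savetxt("z_offsets.txt",
              numpy.array([[1, -100.0], [1, -50.0], [0, 0.0], [1, 50.0]]))

# beads file: one line per bead, each line an x, y position in pixels.
numpy.savetxt("beads.txt", numpy.array([[45.2, 112.7], [201.9, 88.3]]))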
Example #10
    def __init__(self,
                 image_size,
                 spline_file,
                 number_zvals,
                 timestep,
                 upsample=1,
                 check_psf=True):
        """
        Upsample is the multiplier to use for re-sizing the image,
        for example upsample = 2 means to enlarge by 2x.
        """

        self.background = numpy.zeros(image_size)
        self.psf_heights = []
        self.upsample = int(upsample)

        im_size_x, im_size_y = image_size
        size_x = im_size_x * self.upsample
        size_y = im_size_y * self.upsample

        # Load spline.
        s_to_psf = splineToPSF.loadSpline(spline_file)

        # Get size in X and Y.
        self.spline_size_x = self.spline_size_y = s_to_psf.getSize()

        # Calculate z values to use if 3D.
        if (s_to_psf.getType() == "3D"):
            self.z_min = s_to_psf.getZMin()
            self.z_max = s_to_psf.getZMax()
            z_step = (self.z_max - self.z_min) / float(number_zvals - 1.0)
            self.zvals = []
            for i in range(number_zvals):
                self.zvals.append(self.z_min + float(i) * z_step)
        else:
            self.z_min = 0.0
            self.z_max = 1.0
            self.zvals = [0.0]

        psfs = numpy.zeros((size_x, size_y, len(self.zvals)))

        # Add PSFs.
        for i in range(len(self.zvals)):
            psfs[:, :, i] = s_to_psf.getPSF(self.zvals[i],
                                            shape=(im_size_x, im_size_y),
                                            up_sample=upsample,
                                            normalize=True)
            self.psf_heights.append(numpy.max(psfs[:, :, i]))
            #print "fista_decon", i, numpy.max(psfs[:,:,i])

        # Check PSFs.
        if check_psf:
            import os
            import storm_analysis.sa_library.daxwriter as daxwriter

            psf_data = daxwriter.DaxWriter(
                os.path.join(os.path.dirname(spline_file),
                             "fista_decon_psf.dax"), psfs.shape[0],
                psfs.shape[1])
            for i in range(psfs.shape[2]):
                temp = psfs[:, :, i]
                psf_data.addFrame(1000.0 * temp / numpy.max(temp))
            psf_data.close()

        if False:
            # Python solver (useful for debugging).
            print("Using Python solver.")
            self.fsolver = fista_3d.FISTA(psfs, timestep)
        else:
            # C solver (about 4x faster).
            print("Using C solver.")
            self.fsolver = fistaFFTC.FISTA(psfs, timestep)
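
For concreteness, number_zvals = 5 with a 3D spline covering -500nm to 500nm gives z_step = 250nm and self.zvals = [-500, -250, 0, 250, 500]. A hypothetical construction, assuming the enclosing class is the FISTA deconvolution class (the name FISTADecon and the file name are placeholders):

# Hypothetical usage; class and file names are placeholders.
fdecon = FISTADecon((256, 256), "psf.spline", 5, 0.1)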
Example #11
import numpy
import sys

import storm_analysis.sa_library.datareader as datareader
import storm_analysis.sa_library.daxwriter as daxwriter

import scmos_utilities_c

if (len(sys.argv) != 6):
    print("usage: <input_dax> <output_dax> <calib> <sigma> <frames>")
    exit()

# Open the input file.
in_file = datareader.inferReader(sys.argv[1])
f_len = in_file.filmSize()[2]
if (int(sys.argv[5]) > 0) and (int(sys.argv[5]) < f_len):
    f_len = int(sys.argv[5])

# Open the output file.
out_file = daxwriter.DaxWriter(sys.argv[2], 0, 0)

# Load camera calibration (sliced as appropriate
# for the ROI) and create the smoother class.
[offset, variance, gain] = numpy.load(sys.argv[3])
smoother = scmos_utilities_c.Smoother(offset, variance, gain)

# Load images, smooth & output.
sigma_psf = int(round(float(sys.argv[4])))
for i in range(f_len):
    print("Smoothing frame", i)
    in_image = in_file.loadAFrame(i)
    sm_image = smoother.smoothImage(in_image, sigma_psf) + 100.0
    out_file.addFrame(sm_image)

out_file.close()
Example #12
    def simulate(self, dax_file, bin_file, n_frames):

        #
        # Initialization.
        #
        dax_data = daxwriter.DaxWriter(dax_file, self.x_size, self.y_size)
        i3_data_in = readinsight3.loadI3File(bin_file)

        out_fname_base = dax_file[:-4]
        i3_data_out = writeinsight3.I3Writer(out_fname_base + "_olist.bin")
        sim_settings = open(out_fname_base + "_sim_params.txt", "w")

        #
        # Create the user-specified class instances that will do
        # most of the actual work of the simulation.
        #
        bg = self.bg_factory(sim_settings, self.x_size, self.y_size,
                             i3_data_in)
        cam = self.cam_factory(sim_settings, self.x_size, self.y_size,
                               i3_data_in)
        drift = None
        if self.drift_factory is not None:
            drift = self.drift_factory(sim_settings, self.x_size, self.y_size,
                                       i3_data_in)
        pp = self.pphys_factory(sim_settings, self.x_size, self.y_size,
                                i3_data_in)
        psf = self.psf_factory(sim_settings, self.x_size, self.y_size,
                               i3_data_in)

        sim_settings.write(
            json.dumps({
                "simulation": {
                    "bin_file": bin_file,
                    "x_size": str(self.x_size),
                    "y_size": str(self.y_size)
                }
            }) + "\n")

        #
        # Generate the simulated movie.
        #
        for i in range(n_frames):

            # Generate the new image.
            image = numpy.zeros((self.x_size, self.y_size))

            # Get the emitters that are on in the current frame.
            cur_i3 = pp.getEmitters(i).copy()

            print("Frame", i, cur_i3['x'].size, "emitters")

            # Add background to image.
            image += bg.getBackground(i)

            # Set 'bg' parameter of the emitters.
            cur_i3 = bg.getEmitterBackground(cur_i3)

            # Apply drift to the localizations.
            if drift is not None:
                drift.drift(i, cur_i3)

            # Foreground
            image += psf.getPSFs(cur_i3)

            # Camera
            image = cam.readImage(image)

            # Save the image.
            dax_data.addFrame(image)

            # Save the molecule locations.
            cur_i3['fr'] = i + 1
            i3_data_out.addMolecules(cur_i3)

        dax_data.close()
        i3_data_out.close()
        sim_settings.close()
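
The factory arguments are callables invoked as factory(sim_settings, x_size, y_size, i3_data_in); from the calls above, the background object needs getBackground() and getEmitterBackground() methods. A minimal sketch of a flat background class under those assumptions (the real simulator classes may differ):

import numpy

class FlatBackground(object):
    """
    Sketch of a background factory, inferred from the calls in simulate().
    """
    def __init__(self, sim_settings, x_size, y_size, i3_data_in, photons = 100.0):
        self.photons = photons
        self.x_size = x_size
        self.y_size = y_size

    def getBackground(self, frame_number):
        # The same constant background for every frame.
        return numpy.ones((self.x_size, self.y_size)) * self.photons

    def getEmitterBackground(self, i3_data):
        # Record the background level under each emitter.
        i3_data['bg'] = self.photons
        return i3_data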
Example #13
    # Create a dax movie from a hres file.
    if False:
        import storm_analysis.sa_library.daxwriter as daxwriter

        if (len(sys.argv) != 4):
            print("usage: <in_hres> <out_dax> <binning>")
            exit()

        print("Loading High Res Data")
        hresf = HResFile(sys.argv[1])

        print("Creating Dax File")
        print("  Size info:", hresf.getSize())
        [xs, ys, ff, lf] = hresf.getSize()

        dax_data = daxwriter.DaxWriter(sys.argv[2], 0, 0)
        binning = int(sys.argv[3])

        for i in range(ff, lf + 1):
            print("Creating frame:", i)
            frame = hresf.getFrame(i, binning)
            dax_data.addFrame(frame)

        dax_data.close()

    # Create an image from a hres file
    if True:
        import os

        import storm_analysis.sa_library.arraytoimage as arraytoimage
        import storm_analysis.sa_library.daxwriter as daxwriter
Example #14
def psfToSpline(psf_name, spline_name, s_size):
    
    psf_data = pickle.load(open(psf_name, 'rb'))
    np_psf = psf_data["psf"]
    spline = False
    start = np_psf.shape[1]/2.0 - s_size - 0.5


    # 2D spline
    if (len(np_psf.shape) == 2):
        print("Generating 2D spline.")
        s_size = 2*s_size

        np_spline = numpy.zeros((s_size, s_size))
        #np_psf = np_psf/numpy.max(np_psf)
        xy_spline = spline2D.Spline2D(np_psf)
        
        x = start
        for i in range(s_size):
            y = start
            for j in range(s_size):
                np_spline[j,i] = xy_spline.f(y,x)
            
                y += 1.0
            x += 1.0

        print("Calculating spline coefficients.")
        spline = spline2D.Spline2D(np_spline)

        if True:
            import storm_analysis.sa_library.daxwriter as daxwriter
            daxwriter.singleFrameDax(os.path.join(os.path.dirname(spline_name), "spline.dax"), 1000.0*np_spline + 100)


    # 3D spline
    else:
        print("Generating 3D spline.")
        s_size = 2*s_size

        np_spline = numpy.zeros((s_size, s_size, s_size))
        xy_splines = []

        print("Generating XY splines.")
        for i in range(np_psf.shape[0]):
            xy_splines.append(spline2D.Spline2D(np_psf[i,:,:]))

        print("Generating fitting spline.")
        x = start
        for i in range(s_size):
            y = start
            for j in range(s_size):

                zvals = numpy.zeros(np_psf.shape[0])
                for k in range(np_psf.shape[0]):
                    zvals[k] = xy_splines[k].f(y,x)
                # Create the z spline once all the z values are filled in.
                z_spline = spline1D.Spline1D(zvals)

                max_z = float(np_psf.shape[0]) - 1.0
                inc = max_z/(float(s_size)-1.0)
                for k in range(s_size):
                    z = float(k)*inc
                    if (z > max_z):
                        z = max_z
                    np_spline[k,j,i] = z_spline.f(z)

                y += 1.0
            x += 1.0

        print("Calculating spline coefficients.")
        spline = spline3D.Spline3D(np_spline)

        if True:
            import storm_analysis.sa_library.daxwriter as daxwriter
            dxw = daxwriter.DaxWriter(os.path.join(os.path.dirname(spline_name), "spline.dax"),
                                      np_spline.shape[1],
                                      np_spline.shape[2])
            for i in range(s_size):
                dxw.addFrame(1000.0*np_spline[i,:,:] + 100)
            dxw.close()

    del psf_data["psf"]
    psf_data["spline"] = np_spline
    psf_data["coeff"] = spline.getCoeff()
    pickle.dump(psf_data, open(spline_name, 'wb'))
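
A hypothetical call, pairing this with a PSF pickle like the ones written by measurePSF() / measurePSFBeads() in these examples; note that s_size is half the final spline size, since the function doubles it internally:

# Hypothetical usage; file names are placeholders.
psfToSpline("psf.psf", "psf.spline", 15)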
Example #15
    def getZMin(self):
        return self.zmin

    def getZMax(self):
        return self.zmax


def loadSpline(spline_file):
    spline_data = pickle.load(open(spline_file, 'rb'))
    if (spline_data["type"] == "3D"):
        return SplineToPSF3D(spline_data)
    else:
        return SplineToPSF2D(spline_data)


if (__name__ == "__main__"):
    import sys
    import storm_analysis.sa_library.daxwriter as daxwriter

    if (len(sys.argv) != 3):
        print("usage: <spline (input)> <dax (output)>")
        exit()

    stp = SplineToPSF3D(sys.argv[1])
    size = (stp.getSize() - 1) // 2
    dax_data = daxwriter.DaxWriter(sys.argv[2], size, size)
    for z in [-500.0, -250.0, 0.0, 250.0, 500.0]:
        psf = stp.getPSF(z)
        dax_data.addFrame(1000.0 * psf + 100.0)

    dax_data.close()
Example #16
        help=
        "The name of the output movie (with background subtracted). This will be in .dax format."
    )
    parser.add_argument('--xml',
                        dest='settings',
                        type=str,
                        required=True,
                        help="The name of the settings xml file.")

    args = parser.parse_args()

    # Load movies and parameters.
    input_movie = datareader.inferReader(args.in_movie)
    [w, h, l] = input_movie.filmSize()

    output_movie = daxwriter.DaxWriter(args.out_movie, h, w)
    parameters = params.ParametersCommon().initFromFile(args.settings)

    n_frames = parameters.getAttr("max_frame")
    if (n_frames > l) or (n_frames == -1):
        n_frames = l

    # Default to a sample size if the settings file does not specify this.
    sample_size = 100
    if (parameters.getAttr("static_background_estimate", 0) > 0):
        sample_size = parameters.getAttr("static_background_estimate")
    else:
        print(
            "Did not find parameter 'static_background_estimate' in parameters file, defaulting to",
            sample_size)
Example #17
    else:
        # Rolling ball background removal.
        rb = rollingBall.RollingBall(parameters.getAttr("rb_radius"),
                                     parameters.getAttr("rb_sigma"))
        background = rb.estimateBG(image)

    fdecon.newImage(image, background)

    fdecon.decon(parameters.getAttr("fista_iterations"),
                 parameters.getAttr("fista_lambda"),
                 verbose=True)

    # Save results.
    fx = fdecon.getXVector()
    print(numpy.min(fx), numpy.max(fx))
    decon_data = daxwriter.DaxWriter(sys.argv[3], fx.shape[0], fx.shape[1])
    for i in range(fx.shape[2]):
        decon_data.addFrame(fx[:, :, i])
    decon_data.close()

    # Find peaks in the decon data.
    peaks = fdecon.getPeaks(parameters.getAttr("threshold"), 5)

    zci = utilC.getZCenterIndex()
    z_min, z_max = fdecon.getZRange()
    peaks[:, zci] = 1.0e-3 * ((z_max - z_min) * peaks[:, zci] + z_min)

    i3_writer = writeinsight3.I3Writer(sys.argv[3][:-4] + "_flist.bin")
    i3_writer.addMultiFitMolecules(peaks, x_size, y_size, 1,
                                   parameters.getAttr("pixel_size"))
    i3_writer.close()
Example #18
#
# Hazen 09/14
#

import glob
import numpy
import sys

import storm_analysis.sa_library.daxwriter as daxwriter
import storm_analysis.sa_library.datareader as datareader

if (len(sys.argv) != 3):
    print("usage: <dax> <tiff dir>")
    exit()

dax_file = daxwriter.DaxWriter(sys.argv[1], 0, 0)
tiff_files = sorted(glob.glob(sys.argv[2] + "*.tif"))

if (len(tiff_files) == 0):
    print("No tiff files found in '" + sys.argv[2] + "'")
    exit()

for tiff_image in tiff_files:
    print(tiff_image)
    data = datareader.TifReader(tiff_image).loadAFrame(0)
    if False:
        data = data - numpy.median(data) + 2000
    dax_file.addFrame(data)

dax_file.close()
Example #19
    edge = numpy.concatenate((average_psf[i, 0, :], average_psf[i, -1, :],
                              average_psf[i, :, 0], average_psf[i, :, -1]))
    average_psf[i, :, :] -= numpy.mean(edge)

# Normalize PSF.
for i in range(max_z):
    if (totals[i] > 0.0):
        average_psf[i, :, :] = average_psf[i, :, :] / numpy.sum(
            numpy.abs(average_psf[i, :, :]))

average_psf = average_psf / numpy.max(average_psf)

# Save PSF (in image form).
if True:
    import storm_analysis.sa_library.daxwriter as daxwriter
    dxw = daxwriter.DaxWriter("psf_beads.dax", average_psf.shape[1],
                              average_psf.shape[2])
    for i in range(max_z):
        #print i, numpy.max(average_psf[i,:,:])
        dxw.addFrame(1000.0 * average_psf[i, :, :] + 100)
    dxw.close()

# Save PSF.
cur_z = -z_range
z_vals = []
for i in range(max_z):
    z_vals.append(cur_z)
    cur_z += z_step

psf_dict = {
    "psf": average_psf,
    "pixel_size": 0.080,  # 1/2 the camera pixel size in nm.
Example #20
def saveStack(name, stack):
    daxf = daxwriter.DaxWriter(name, stack.shape[1], stack.shape[2])
    for i in range(stack.shape[0]):
        daxf.addFrame(stack[i, :, :])
    daxf.close()
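
A one line usage sketch with a made-up stack (frames are indexed on the first axis):

# Hypothetical usage: save a random 10 frame, 64 x 64 stack.
saveStack("stack.dax", 100.0 * numpy.random.uniform(size = (10, 64, 64)))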
Example #21
import numpy
import sys

import storm_analysis.sa_library.daxwriter as daxwriter
import storm_analysis.sa_library.writeinsight3 as writeinsight3

#import astigmaticPSF as PSF
import dhPSF as PSF

if (len(sys.argv) != 5):
    print("usage: <dax> <bin> <frames> <num_objects>")
    exit()

# Peak height.
intensity = 1000.0

# Image size.
x_size = 256
y_size = 256

dax_data = daxwriter.DaxWriter(sys.argv[1], x_size, y_size)
i3_data = writeinsight3.I3Writer(sys.argv[2])
num_frames = int(sys.argv[3])
num_objects = int(sys.argv[4])

for i in range(num_frames):
    print("Generating frame:", i)

    # Generate locations
    x_vals = numpy.zeros(num_objects)
    y_vals = numpy.zeros(num_objects)
    z_vals = numpy.zeros(num_objects)
    h_vals = numpy.ones(num_objects) * intensity

    for j in range(num_objects):
Example #22
def measurePSF(movie_name,
               zfile_name,
               movie_mlist,
               psf_name,
               want2d=False,
               aoi_size=12,
               z_range=750.0,
               z_step=50.0):
    """
    The actual z range is 2x z_range (i.e. from -z_range to z_range).
    """

    # Load dax file, z offset file and molecule list file.
    dax_data = datareader.inferReader(movie_name)
    z_offsets = None
    if os.path.exists(zfile_name):
        try:
            z_offsets = numpy.loadtxt(zfile_name, ndmin=2)[:, 1]
        except IndexError:
            z_offsets = None
            print("z offsets were not loaded.")
    i3_data = readinsight3.loadI3File(movie_mlist)

    if want2d:
        print("Measuring 2D PSF")
    else:
        print("Measuring 3D PSF")

    #
    # Go through the frames identifying good peaks and adding them
    # to the average psf. For 3D molecule z positions are rounded to
    # the nearest 50nm.
    #
    z_mid = int(z_range / z_step)
    max_z = 2 * z_mid + 1

    average_psf = numpy.zeros((max_z, 4 * aoi_size, 4 * aoi_size))
    peaks_used = 0
    totals = numpy.zeros(max_z)
    [dax_x, dax_y, dax_l] = dax_data.filmSize()
    for curf in range(dax_l):

        # Select localizations in current frame & not near the edges.
        mask = ((i3_data['fr'] == curf + 1) &
                (i3_data['x'] > aoi_size) &
                (i3_data['x'] < (dax_x - aoi_size - 1)) &
                (i3_data['y'] > aoi_size) &
                (i3_data['y'] < (dax_y - aoi_size - 1)))
        xr = i3_data['x'][mask]
        yr = i3_data['y'][mask]

        # Use the z offset file if it was specified, otherwise use localization z positions.
        if z_offsets is None:
            if (curf == 0):
                print("Using fit z locations.")
            zr = i3_data['z'][mask]
        else:
            if (curf == 0):
                print("Using z offset file.")
            zr = numpy.ones(xr.size) * z_offsets[curf]

        ht = i3_data['h'][mask]

        # Remove localizations that are too close to each other.
        in_peaks = numpy.zeros((xr.size, util_c.getNPeakPar()))
        in_peaks[:, util_c.getXCenterIndex()] = xr
        in_peaks[:, util_c.getYCenterIndex()] = yr
        in_peaks[:, util_c.getZCenterIndex()] = zr
        in_peaks[:, util_c.getHeightIndex()] = ht

        out_peaks = util_c.removeNeighbors(in_peaks, 2 * aoi_size)
        #out_peaks = util_c.removeNeighbors(in_peaks, aoi_size)

        print(curf, "peaks in", in_peaks.shape[0], ", peaks out",
              out_peaks.shape[0])

        # Use remaining localizations to calculate spline.
        image = dax_data.loadAFrame(curf).astype(numpy.float64)

        xr = out_peaks[:, util_c.getXCenterIndex()]
        yr = out_peaks[:, util_c.getYCenterIndex()]
        zr = out_peaks[:, util_c.getZCenterIndex()]
        ht = out_peaks[:, util_c.getHeightIndex()]

        for i in range(xr.size):
            xf = xr[i]
            yf = yr[i]
            zf = zr[i]
            xi = int(xf)
            yi = int(yf)
            if want2d:
                zi = 0
            else:
                zi = int(round(zf / z_step) + z_mid)

            # check the z is in range
            if (zi > -1) and (zi < max_z):

                # get localization image
                mat = image[xi - aoi_size:xi + aoi_size,
                            yi - aoi_size:yi + aoi_size]

                # zoom in by 2x
                psf = scipy.ndimage.zoom(mat, 2.0)

                # re-center image
                psf = scipy.ndimage.shift(
                    psf, (-2.0 * (xf - xi), -2.0 * (yf - yi)), mode='nearest')

                # add to average psf accumulator
                average_psf[zi, :, :] += psf
                totals[zi] += 1

    # Force PSF to be zero (on average) at the boundaries.
    for i in range(max_z):
        edge = numpy.concatenate((average_psf[i, 0, :], average_psf[i, -1, :],
                                  average_psf[i, :, 0], average_psf[i, :, -1]))
        average_psf[i, :, :] -= numpy.mean(edge)

    # Normalize the PSF.
    if want2d:
        max_z = 1

    for i in range(max_z):
        print(i, totals[i])
        if (totals[i] > 0.0):
            average_psf[i, :, :] = average_psf[i, :, :] / numpy.sum(
                numpy.abs(average_psf[i, :, :]))

    average_psf = average_psf / numpy.max(average_psf)

    # Save PSF (in image form).
    if True:
        import storm_analysis.sa_library.daxwriter as daxwriter
        dxw = daxwriter.DaxWriter(
            os.path.join(os.path.dirname(psf_name), "psf.dax"),
            average_psf.shape[1], average_psf.shape[2])
        for i in range(max_z):
            dxw.addFrame(1000.0 * average_psf[i, :, :] + 100)
        dxw.close()

    # Save PSF.
    if want2d:
        psf_dict = {"psf": average_psf[0, :, :], "type": "2D"}

    else:
        cur_z = -z_range
        z_vals = []
        for i in range(max_z):
            z_vals.append(cur_z)
            cur_z += z_step

        psf_dict = {
            "psf": average_psf,
            "pixel_size": 0.080,  # 1/2 the camera pixel size in nm.
            "type": "3D",
            "zmin": -z_range,
            "zmax": z_range,
            "zvals": z_vals
        }

    with open(psf_name, 'wb') as fp:
        pickle.dump(psf_dict, fp)
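
A hypothetical call; all of the file names are placeholders, and the keyword values simply restate the defaults from the signature above:

# Hypothetical usage; file names are placeholders.
measurePSF("beads.dax", "z_offsets.txt", "beads_mlist.bin", "beads.psf",
           want2d = False, aoi_size = 12, z_range = 750.0, z_step = 50.0)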