def psfZStack(movie_name, h5_filename, zstack_name, scmos_cal = None, aoi_size = 8, driftx = 0.0, drifty = 0.0):
    """
    movie_name - The movie file containing the z stack.
    h5_filename - The HDF5 file containing the localizations to use for the PSF measurement.
    zstack_name - The name of the file to save the zstack in.
    scmos_cal - The sCMOS calibration file.
    aoi_size - The AOI size in pixels.

    driftx, drifty are in units of pixels per frame,
    (bead x last frame - bead x first frame)/n_frames.
    """
    # Create appropriate reader.
    if scmos_cal is None:
        fr_reader = datareader.inferReader(movie_name)
    else:
        fr_reader = analysisIO.FrameReaderSCMOS(movie_file = movie_name,
                                                calibration_file = scmos_cal)

    [movie_x, movie_y, movie_len] = fr_reader.filmSize()

    # Load localizations.
    with saH5Py.SAH5Py(h5_filename) as h5:
        locs = h5.getLocalizations()
        x = locs["y"] + 1
        y = locs["x"] + 1

    # Measure Z stacks.
    z_stacks = []
    for i in range(x.size):
        z_stacks.append(numpy.zeros((4*aoi_size, 4*aoi_size, movie_len)))

    for i in range(movie_len):
        if ((i % 50) == 0):
            print("Processing frame {0:0d}".format(i))

        # Load the frame. This also handles gain and offset correction.
        #
        frame = fr_reader.loadAFrame(i)

        # Subtract estimated background. This assumes that the image is
        # mostly background and that the background is uniform.
        #
        frame = frame - numpy.median(frame)

        for j in range(x.size):
            xf = x[j] + driftx * float(i)
            yf = y[j] + drifty * float(i)
            z_stacks[j][:,:,i] = measurePSFUtils.extractAOI(frame, aoi_size, xf, yf)

    # Save z_stacks.
    numpy.save(zstack_name + ".npy", z_stacks)

    # Save a (normalized) z_stack as tif for inspection purposes.
    z_stack = z_stacks[0]
    z_stack = z_stack/numpy.amax(z_stack)
    z_stack = z_stack.astype(numpy.float32)
    with tifffile.TiffWriter(zstack_name + ".tif") as tf:
        for i in range(movie_len):
            tf.save(z_stack[:,:,i])
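# A minimal usage sketch for psfZStack(), left as a comment because it needs real
# data files. The file names below are hypothetical placeholders: the movie would
# be a z scan of beads and the HDF5 file the bead localizations found in a
# reference frame. driftx/drifty are the per-frame drift, e.g. estimated from the
# bead positions in the first and last frames of the scan.
#
# psfZStack("beads_zscan.dax", "beads_ref_locs.hdf5", "beads_zstack",
#           scmos_cal = None, aoi_size = 8, driftx = 0.0, drifty = 0.0)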
def measurePSF(movie_name, zfile_name, movie_h5_name, psf_name, want2d = False, aoi_size = 12, pixel_size = 0.1, z_range = 0.75, z_step = 0.05):
    """
    movie_name - The name of the movie file.
    zfile_name - The name of the text file containing z offset data. If this does
                 not exist then the localization z values will be used.
    movie_h5_name - The name of the HDF5 file containing the localization information.
    psf_name - The name of the file to save the measured PSF in.
    want2d - Measure a 2D PSF.
    aoi_size - The final AOI size will be 2x this number (in pixels).
    pixel_size - The pixel size in microns.
    z_range - The z range of the PSF (in microns). The actual z range is 2x z_range
              (i.e. from -z_range to z_range).
    z_step - The z granularity of the PSF (in microns).
    """
    # Create z scaling object.
    z_sclr = measurePSFUtils.ZScaler(z_range, z_step)

    # Load dax file, z offset file and molecule list file.
    dax_data = datareader.inferReader(movie_name)
    z_off = None
    if os.path.exists(zfile_name):
        data = numpy.loadtxt(zfile_name, ndmin = 2)
        valid = data[:,0]
        z_off = data[:,1]

    if want2d:
        print("Measuring 2D PSF")
    else:
        print("Measuring 3D PSF")

    # Go through the frames identifying good peaks and adding them
    # to the average psf.
    #
    max_z = z_sclr.getMaxZ()

    average_psf = numpy.zeros((max_z, 2*aoi_size, 2*aoi_size))
    peaks_used = 0
    totals = numpy.zeros(max_z, dtype = numpy.int64)

    with saH5Py.SAH5Py(movie_h5_name) as h5:
        [dax_x, dax_y, dax_l] = dax_data.filmSize()

        for curf, locs in h5.localizationsIterator():

            # Select localizations in current frame & not near the edges.
            mask = (locs['x'] > aoi_size) & (locs['x'] < (dax_x - aoi_size - 1)) & \
                   (locs['y'] > aoi_size) & (locs['y'] < (dax_y - aoi_size - 1))
            xr = locs['y'][mask] + 1
            yr = locs['x'][mask] + 1

            # Use the z offset file if it was specified, otherwise use
            # localization z positions.
            if z_off is None:
                if (curf == 0):
                    print("Using fit z locations.")
                zr = locs['z'][mask]
            else:
                if (curf == 0):
                    print("Using z offset file.")
                if (abs(valid[curf]) < 1.0e-6):
                    continue
                zr = numpy.ones(xr.size) * z_off[curf]

            ht = locs['height'][mask]

            # Remove localizations that are too close to each other.
            mask = iaUtilsC.removeNeighborsMask(xr, yr, 2.0 * aoi_size)
            print(curf, "peaks in", xr.size, ", peaks out", numpy.count_nonzero(mask))

            xr = xr[mask]
            yr = yr[mask]
            zr = zr[mask]
            ht = ht[mask]

            # Use remaining localizations to calculate spline.
            image = dax_data.loadAFrame(curf).astype(numpy.float64)

            for i in range(xr.size):
                xf = xr[i]
                yf = yr[i]
                zf = zr[i]

                if want2d:
                    zi = 0
                else:
                    zi = z_sclr.convert(zf)

                # Check that the z value is in range.
                if z_sclr.inRange(zi):

                    # Extract PSF.
                    psf = measurePSFUtils.extractAOI(image, aoi_size, xf, yf)

                    # Add to average psf accumulator.
                    average_psf[zi,:,:] += psf
                    totals[zi] += 1

    # Check that we got at least one valid measurement.
    #
    assert (numpy.max(totals) > 0)

    # Set the PSF to have zero average on the X/Y boundaries.
    #
    for i in range(max_z):
        edge = numpy.concatenate((average_psf[i,0,:],
                                  average_psf[i,-1,:],
                                  average_psf[i,:,0],
                                  average_psf[i,:,-1]))
        average_psf[i,:,:] -= numpy.mean(edge)

    # Normalize the PSF.
    #
    if want2d:
        max_z = 1

    # Note: I think it makes sense to normalize to a sum of 1.0 here as the user may
    #       be using the images of single localizations as the inputs. Unlike beads
    #       we can't assume that they are all the same brightness so normalizing by
    #       the number of events would make even less sense.
    #
    for i in range(max_z):
        print("z plane {0:0d} has {1:0d} samples".format(i, totals[i]))
        if (totals[i] > 0.0):
            average_psf[i,:,:] = average_psf[i,:,:]/numpy.sum(numpy.abs(average_psf[i,:,:]))

    # Normalize to unity maximum height.
    if (numpy.max(average_psf) > 0.0):
        average_psf = average_psf/numpy.max(average_psf)
    else:
        print("Warning! Measured PSF maximum is zero or negative!")

    # Save PSF (in image form).
    #
    # FIXME: This may be useful but it is annoying for automated testing as this file
    #        is created in whichever directory the tests are run in.
    #
    if True:
        with tifffile.TiffWriter("psf.tif") as tf:
            for i in range(max_z):
                tf.save(average_psf[i,:,:].astype(numpy.float32))

    # Save PSF.
    #
    # At least for now the PSFs use nanometers, not microns.
    #
    z_range = z_range * 1.0e+3
    z_step = z_step * 1.0e+3

    if want2d:
        psf_dict = {"psf" : average_psf[0,:,:],
                    "pixel_size" : pixel_size,
                    "type" : "2D",
                    "version" : 2.0}
    else:
        cur_z = -z_range
        z_vals = []
        for i in range(max_z):
            z_vals.append(cur_z)
            cur_z += z_step

        psf_dict = {"psf" : average_psf,
                    "pixel_size" : pixel_size,
                    "type" : "3D",
                    "version" : 2.0,
                    "zmin" : -z_range,
                    "zmax" : z_range,
                    "zvals" : z_vals}

    with open(psf_name, 'wb') as fp:
        pickle.dump(psf_dict, fp)
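# A minimal usage sketch for measurePSF(), again with hypothetical file names.
# From the loading code above, the z offset text file is expected to have one row
# per frame with two columns, a 'valid' flag and the z offset (in the same units
# as z_range, i.e. microns); if the file does not exist the fitted z values in
# the HDF5 file are used instead.
#
# measurePSF("beads_zscan.dax", "beads_zscan.off", "beads_zscan_ref.hdf5",
#            "beads_psf.psf", want2d = False, aoi_size = 12,
#            pixel_size = 0.1, z_range = 0.75, z_step = 0.05)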
def test_extract_roi_1():
    """
    Test range checking in extractAOI().
    """
    frame = numpy.zeros((100, 100))
    aoi_size = 10

    # X test 1.
    im = mPSFUtils.extractAOI(frame, aoi_size, 10, 20)
    assert (im.shape[0] == 2 * aoi_size)
    assert (im.shape[1] == 2 * aoi_size)

    okay = False
    try:
        mPSFUtils.extractAOI(frame, aoi_size, 9, 20)
    except AssertionError:
        okay = True
    assert okay

    # Y test 1.
    im = mPSFUtils.extractAOI(frame, aoi_size, 20, 10)
    assert (im.shape[0] == 2 * aoi_size)
    assert (im.shape[1] == 2 * aoi_size)

    okay = False
    try:
        mPSFUtils.extractAOI(frame, aoi_size, 20, 9)
    except AssertionError:
        okay = True
    assert okay

    # X test 2.
    im = mPSFUtils.extractAOI(frame, aoi_size, 90, 20)
    assert (im.shape[0] == 2 * aoi_size)
    assert (im.shape[1] == 2 * aoi_size)

    okay = False
    try:
        mPSFUtils.extractAOI(frame, aoi_size, 91, 20)
    except AssertionError:
        okay = True
    assert okay

    # Y test 2.
    im = mPSFUtils.extractAOI(frame, aoi_size, 20, 90)
    assert (im.shape[0] == 2 * aoi_size)
    assert (im.shape[1] == 2 * aoi_size)

    okay = False
    try:
        mPSFUtils.extractAOI(frame, aoi_size, 20, 91)
    except AssertionError:
        okay = True
    assert okay
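# This is a pytest-style test; assuming it sits with the rest of the project's
# tests, something like the following should run it on its own:
#
#   pytest -k "test_extract_roi_1"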