def __init__(self, image_size, spline_file, number_zvals, timestep, upsample=1):
    """Set up a FISTA deconvolver using a cubic-spline PSF model.

    image_size - [x, y] size of the images that will be deconvolved.
    spline_file - Spline file as understood by splineToPSF.SplineToPSF.
    number_zvals - Number of z planes to deconvolve over (must be >= 2,
                   otherwise the z step computation divides by zero).
    timestep - FISTA timestep parameter, passed through to the solver.
    upsample - Integer lateral up-sampling factor (default 1).
    """
    self.background = numpy.zeros(image_size)
    self.psf_heights = []
    self.upsample = int(upsample)

    im_size_x, im_size_y = image_size
    size_x = im_size_x * self.upsample
    size_y = im_size_y * self.upsample

    s_to_psf = splineToPSF.SplineToPSF(spline_file)
    self.spline_size_x = self.spline_size_y = s_to_psf.getSize()

    # Calculate z values to use, evenly spaced from z_min to z_max inclusive.
    self.z_min = s_to_psf.getZMin()
    self.z_max = s_to_psf.getZMax()
    z_step = (self.z_max - self.z_min) / float(number_zvals - 1.0)
    self.zvals = []
    for i in range(number_zvals):
        self.zvals.append(self.z_min + float(i) * z_step)

    psfs = numpy.zeros((size_x, size_y, len(self.zvals)))

    # Add one (normalized, up-sampled) PSF plane per z value, recording the
    # peak height of each plane.
    for i in range(len(self.zvals)):
        psfs[:, :, i] = s_to_psf.getPSF(self.zvals[i],
                                        shape=(im_size_x, im_size_y),
                                        up_sample=upsample,
                                        normalize=True)
        self.psf_heights.append(numpy.max(psfs[:, :, i]))

    # Check PSFs (debugging output; writes each plane scaled to a max of 1000).
    if 1:
        import sa_library.daxwriter as daxwriter
        psf_data = daxwriter.DaxWriter("fista_decon_psf.dax", psfs.shape[0], psfs.shape[1])
        for i in range(psfs.shape[2]):
            temp = psfs[:, :, i]
            psf_data.addFrame(1000.0 * temp / numpy.max(temp))
        psf_data.close()

    if 0:
        # Python solver (useful for debugging).
        print("Using Python solver.")
        self.fsolver = fista_3d.FISTA(psfs, timestep)
    else:
        # C solver (about 4x faster).
        print("Using C solver.")
        self.fsolver = fistaFFTC.FISTA(psfs, timestep)
def saveAsDax(file_name, A, measured_pixels):
    """Visualize a matrix by saving each of its columns as a dax frame.

    Each column of A is extracted by multiplying A with the corresponding
    unit basis vector, reshaped into a measured_pixels x measured_pixels
    image, scaled by 10000 and written as one frame.
    """
    import sa_library.daxwriter as daxwriter
    writer = daxwriter.DaxWriter(file_name, 0, 0)
    n_cols = A.shape[1]
    for col in range(n_cols):
        basis = numpy.zeros(n_cols)
        basis[col] = 1.0
        frame = numpy.dot(A, basis)
        frame = frame.reshape(measured_pixels, measured_pixels)
        writer.addFrame(10000.0 * frame)
    writer.close()
def writeDax(dax_file, dax_data, rescale=False):
    """Save a stack of numpy arrays as a dax movie.

    dax_data - Iterable of 2D frames (anything numpy can turn into an array).
    rescale - When True, linearly map the data onto [0, 2**16 - 1] before
              conversion; a constant stack is just shifted to zero.

    The data is always converted to uint16 before writing.
    """
    import sa_library.daxwriter as daxwriter

    frames = np.array(dax_data)
    if rescale:
        frames = np.array(dax_data, dtype=float)
        lo = np.min(frames)
        hi = np.max(frames)
        if hi - lo != 0:
            frames = (frames - lo) / (hi - lo)
        else:
            # Constant data: just shift to zero to avoid dividing by zero.
            frames = frames - lo
        frames = frames * (2**16 - 1)
    frames = np.array(frames, dtype=np.uint16)

    w, h = frames[0].shape
    writer = daxwriter.DaxWriter(dax_file, w, h)
    for frame in frames:
        writer.addFrame(frame)
    writer.close()
# NOTE(review): this is a fragment -- the code below runs inside enclosing
# loops over spline grid positions (i, j) that are not visible here, so the
# indentation shown is a best-effort reconstruction.

# Evaluate each per-plane xy spline at the current (y, x) position to get
# the PSF profile along z at this grid point.
for k in range(np_psf.shape[0]):
    zvals[k] = xy_splines[k].f(y, x)

# Fit a 1D spline along z and resample it onto the s_size z grid.
z_spline = spline1D.Spline1D(zvals)
max_z = float(np_psf.shape[0]) - 1.0
inc = max_z / (float(s_size) - 1.0)
for k in range(s_size):
    z = float(k) * inc
    # Clamp to the valid range (presumably guards against float round-off
    # pushing z just past max_z -- verify).
    if (z > max_z):
        z = max_z
    np_spline[k, j, i] = z_spline.f(z)

# Advance the sampling position; these increments belong to the enclosing
# j / i loops respectively (indentation approximate).
y += 1.0
x += 1.0

print("Calculating spline coefficients.")
spline = spline3D.Spline3D(np_spline)

# Debugging output: write the resampled spline planes as a dax movie
# (scaled by 1000, offset by 100) for visual inspection.
if 1:
    import storm_analysis.sa_library.daxwriter as daxwriter
    dxw = daxwriter.DaxWriter("spline.dax", np_spline.shape[1], np_spline.shape[2])
    for i in range(s_size):
        dxw.addFrame(1000.0 * np_spline[i, :, :] + 100)
    dxw.close()

# Replace the raw PSF with the spline representation and pickle the result
# to the output file given on the command line.
del psf_data["psf"]
psf_data["spline"] = np_spline
psf_data["coeff"] = spline.getCoeff()
pickle.dump(psf_data, open(sys.argv[2], 'wb'))
#import astigmaticPSF as PSF import dhPSF as PSF if (len(sys.argv) != 5): print "usage: <dax> <bin> <frames> <num_objects>" exit() # Peak height. intensity = 500.0 # Image size. x_size = 256 y_size = 256 dax_data = daxwriter.DaxWriter(sys.argv[1], x_size, y_size) i3_data = writeinsight3.I3Writer(sys.argv[2]) num_frames = int(sys.argv[3]) num_objects = int(sys.argv[4]) for i in range(num_frames): print "Generating frame:", i # Generate locations x_vals = numpy.zeros(num_objects) y_vals = numpy.zeros(num_objects) z_vals = numpy.zeros(num_objects) h_vals = numpy.ones(num_objects) * intensity for j in range(num_objects):
# NOTE(review): fragment of a peak-finding utility test; test_image is
# created before this fragment begins.

# Plant a few test peaks in plane 1 of the image.
test_image[5,5,1] = 2.0
test_image[7,6,1] = 1.0
test_image[7,7,1] = 2.0
test_image[7,8,1] = 1.0

# Label connected regions above a 0.1 threshold.
[labels, counts] = label(test_image, 0.1, 0)
#print "1"
#peaks = moments(test_image, labels, counts)
#print "2"
peaks = getPeaks(test_image, 0.1, 0)
print(peaks)

# Save the label planes as a dax movie for visual inspection.
labels_image = daxwriter.DaxWriter("fd_util_test.dax", test_image.shape[0], test_image.shape[1])
for i in range(labels.shape[2]):
    labels_image.addFrame(labels[:,:,i])
labels_image.close()

#
# The MIT License
#
# Copyright (c) 2016 Zhuang Lab, Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# Script (Python 2): smooth an sCMOS movie frame by frame, using the camera
# calibration, and save the result as a dax file.

import sa_library.daxwriter as daxwriter
import scmos_utilities_c

if (len(sys.argv) != 6):
    print "usage: <input_dax> <output_dax> <calib> <sigma> <frames>"
    exit()

# Open the input file. Only the first <frames> frames are processed when
# that argument is positive and smaller than the movie length.
in_file = datareader.inferReader(sys.argv[1])
f_len = in_file.filmSize()[2]
if (int(sys.argv[5]) > 0) and (int(sys.argv[5]) < f_len):
    f_len = int(sys.argv[5])

# Open the output file.
out_file = daxwriter.DaxWriter(sys.argv[2], 0, 0)

# Load camera calibration (sliced as appropriate
# for the ROI) and create the smoother class.
[offset, variance, gain] = numpy.load(sys.argv[3])
smoother = scmos_utilities_c.Smoother(offset, variance, gain)

# Load images, smooth & output.
sigma_psf = int(round(float(sys.argv[4])))
for i in range(f_len):
    print "Smoothing frame", i
    in_image = in_file.loadAFrame(i)
    # +100 offset -- presumably keeps the saved pixel values positive for
    # the dax format; verify against daxwriter.
    sm_image = smoother.smoothImage(in_image, sigma_psf) + 100.0
    out_file.addFrame(sm_image)
out_file.close()
                    wavelet_level)

# NOTE(review): fragment -- the line above closes a call whose start is not
# visible here, and the per-frame loop below is cut off at the end.

if (__name__ == "__main__"):

    import sys

    import sa_library.datareader as datareader
    import sa_library.daxwriter as daxwriter

    if (len(sys.argv) < 6):
        print "usage <movie> <wavelet_type> <wavelet_level> <iterations> <threshold> <baseline (optional, 100 default)>"
        exit()

    input_movie = datareader.inferReader(sys.argv[1])
    output_dax = daxwriter.DaxWriter("subtracted.dax", 0, 0)

    # Command line parameters; baseline offset defaults to 100.0 when the
    # optional sixth argument is not given.
    iterations = int(sys.argv[4])
    threshold = float(sys.argv[5])
    wavelet_level = int(sys.argv[3])
    offset = 100.0
    if (len(sys.argv) == 7):
        offset = float(sys.argv[6])

    wbgr = WaveletBGR(wavelet_type=sys.argv[2])

    for i in range(input_movie.filmSize()[2]):

        # Progress report every 10 frames.
        if ((i % 10) == 0):
            print "Processing frame", i
# # Hazen 09/14 # import glob import numpy import sys import sa_library.daxwriter as daxwriter import sa_library.datareader as datareader if (len(sys.argv) != 3): print "usage: <dax> <tiff dir>" exit() dax_file = daxwriter.DaxWriter(sys.argv[1], 0, 0) tiff_files = sorted(glob.glob(sys.argv[2] + "*.tif")) if (len(tiff_files) == 0): print "No tiff files found in '" + sys.argv[2] + "'" exit() for tiff_image in tiff_files: print tiff_image data = datareader.TifReader(tiff_image).loadAFrame(0) if 0: data = data - numpy.median(data) + 2000 dax_file.addFrame(data) dax_file.close()
# Create a dax movie from a hres file. if 0: import sa_library.daxwriter as daxwriter if (len(sys.argv) != 4): print "usage: <in_hres> <out_dax> <binning>" exit() print "Loading High Res Data" hresf = HResFile(sys.argv[1]) print "Creating Dax File" print " Size info:", hresf.getSize() [xs, ys, ff, lf] = hresf.getSize() dax_data = daxwriter.DaxWriter(sys.argv[2], 0, 0) binning = int(sys.argv[3]) for i in range(ff, lf + 1): print "Creating frame:", i frame = hresf.getFrame(i, binning) dax_data.addFrame(frame) dax_data.close() # Create an image from a hres file if 1: import os import sa_library.arraytoimage as arraytoimage import sa_library.daxwriter as daxwriter
                             parameters.wbgr_wavelet_level)
# NOTE(review): fragment -- the line above closes a call (presumably the
# wavelet background estimator) whose start is not visible here.
else:
    # Rolling ball background removal.
    rb = rollingBall.RollingBall(parameters.rb_radius, parameters.rb_sigma)
    background = rb.estimateBG(image)

# Run the FISTA deconvolution on the background-subtracted image.
fdecon.newImage(image, background)
fdecon.decon(parameters.fista_iterations, parameters.fista_lambda, verbose=True)

# Save results.
fx = fdecon.getXVector()
print numpy.min(fx), numpy.max(fx)
decon_data = daxwriter.DaxWriter(sys.argv[3], fx.shape[0], fx.shape[1])
for i in range(fx.shape[2]):
    decon_data.addFrame(fx[:, :, i])
decon_data.close()

# Find peaks in the decon data.
peaks = fdecon.getPeaks(parameters.fista_threshold, 5)

zci = utilC.getZCenterIndex()
z_min, z_max = fdecon.getZRange()
# Presumably rescales the z column from a fractional plane index to microns
# (the 1.0e-3 factor suggests nm -> um) -- verify against getZRange units.
peaks[:, zci] = 1.0e-3 * ((z_max - z_min) * peaks[:, zci] + z_min)

# Write the localizations next to the decon movie as an Insight3 .bin file.
i3_writer = writeinsight3.I3Writer(sys.argv[3][:-4] + "_flist.bin")
i3_writer.addMultiFitMolecules(peaks, x_size, y_size, 1, parameters.pixel_size)
i3_writer.close()
# NOTE(review): fragment -- the lines below continue a call (edge pixel
# collection) and an enclosing loop over z planes that start before this
# fragment; indentation is a best-effort reconstruction.
                         average_psf[i,-1,:],
                         average_psf[i,:,0],
                         average_psf[i,:,-1]))
    # Subtract the mean of the edge pixels from this plane.
    average_psf[i,:,:] -= numpy.mean(edge)

# Normalize PSF.
for i in range(max_z):
    if (totals[i] > 0.0):
        average_psf[i,:,:] = average_psf[i,:,:]/numpy.sum(numpy.abs(average_psf[i,:,:]))

average_psf = average_psf/numpy.max(average_psf)

# Save PSF (in image form). Debugging output: each plane scaled by 1000,
# offset by 100.
if 1:
    import sa_library.daxwriter as daxwriter
    dxw = daxwriter.DaxWriter("psf_beads.dax", average_psf.shape[1], average_psf.shape[2])
    for i in range(max_z):
        #print i, numpy.max(average_psf[i,:,:])
        dxw.addFrame(1000.0 * average_psf[i,:,:] + 100)
    dxw.close()

# Save PSF.
# Build the list of z values, from -z_range upward in steps of z_step.
cur_z = -z_range
z_vals = []
for i in range(max_z):
    z_vals.append(cur_z)
    cur_z += z_step

# NOTE(review): 'dict' shadows the builtin; the literal is also cut off at
# the end of this fragment.
dict = {"psf" : average_psf,
        "pixel_size" : 0.080, # 1/2 the camera pixel size in nm.
        "type" : "3D",
def saveStack(name, stack):
    """Write a 3D numpy stack to a dax movie, one frame per leading index."""
    writer = daxwriter.DaxWriter(name, stack.shape[1], stack.shape[2])
    n_frames = stack.shape[0]
    for frame_no in range(n_frames):
        writer.addFrame(stack[frame_no, :, :])
    writer.close()
def getZMax(self):
    """Return the maximum z value covered by the spline."""
    return self.zmax


if (__name__ == "__main__"):

    # Test script (Python 2): render the spline PSF at a few z values and
    # save the frames to a dax file.
    import sys
    import sa_library.daxwriter as daxwriter

    if (len(sys.argv) != 3):
        print "usage: <spline (input)> <dax (output)>"
        exit()

    stp = SplineToPSF(sys.argv[1])
    # NOTE(review): relies on Python 2 integer division; under Python 3 this
    # would produce a float frame size.
    size = (stp.getSize() - 1) / 2
    dax_data = daxwriter.DaxWriter(sys.argv[2], size, size)
    # z values presumably in nanometers -- verify against getPSF.
    for z in [-500.0, -250.0, 0.0, 250.0, 500.0]:
        psf = stp.getPSF(z)
        dax_data.addFrame(1000.0 * psf + 100.0)
    dax_data.close()

#
# The MIT License
#
# Copyright (c) 2016 Zhuang Lab, Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell