def test_simulate_3():
    """
    No photo-physics, spline PSF, sCMOS camera.
    """
    # Only test for Python3 due to pickle incompatibility issues.
    if (sys.version_info < (3, 0)):
        return

    dax_name = storm_analysis.getPathOutputTest("test_sim3.dax")
    bin_name = storm_analysis.getData("test/data/test_sim.hdf5")
    cal_name = storm_analysis.getData("test/data/calib.npy")
    spline_name = storm_analysis.getData("test/data/test_spliner_psf.spline")

    sim = simulate.Simulate(background_factory=lambda settings, xs, ys, i3data: background.UniformBackground(settings, xs, ys, i3data, photons=20),
                            camera_factory=lambda settings, xs, ys, i3data: camera.SCMOS(settings, xs, ys, i3data, cal_name),
                            photophysics_factory=lambda settings, xs, ys, i3data: photophysics.AlwaysOn(settings, xs, ys, i3data, 2000.0),
                            psf_factory=lambda settings, xs, ys, i3data: psf.Spline(settings, xs, ys, i3data, 160.0, spline_name))
    sim.simulate(dax_name, bin_name, 5)


def test_drift_correction():

    # Calculate drift correction.
    param_name = storm_analysis.getData("test/data/test_drift.xml")
    parameters = params.ParametersAnalysis().initFromFile(param_name)

    mlist_name = storm_analysis.getData("test/data/test_drift_mlist.bin")
    drift_output = storm_analysis.getPathOutputTest("test_drift_drift.txt")

    xyzDriftCorrection.xyzDriftCorrection(mlist_name,
                                          drift_output,
                                          parameters.getAttr("frame_step"),
                                          parameters.getAttr("d_scale"),
                                          correct_z=True)

    # Verify results.
    diffs = veri.verifyDriftCorrection(storm_analysis.getData("test/data/test_drift.txt"), drift_output)

    if (diffs[0] > 0.1):
        raise Exception("Frame numbers do not match.")

    # These thresholds are somewhat arbitrary.
    if (diffs[1] > 0.1) or (diffs[2] > 0.1):
        raise Exception("XY drift correction error.")

    if (diffs[3] > 30.0):
        raise Exception("Z drift correction error.")


def test_homotopy_psf():
    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_olist.bin")
    psf = storm_analysis.getPathOutputTest("l1h_psf.psf")
    storm_analysis.removeFile(psf)

    from storm_analysis.L1H.homotopy_psf import homotopyPSF
    homotopyPSF(movie, mlist, psf)


def test_3ddao_2d_fixed_non_square():
    movie_name = storm_analysis.getData("test/data/test_300x200.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_300x200.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)


def test_3ddao_Z():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_Z.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)


def test_measure_psf_2D():
    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf_2d.psf")
    storm_analysis.removeFile(psf)

    from storm_analysis.spliner.measure_psf import measurePSF
    measurePSF(movie, "", mlist, psf, want2d=True, aoi_size=5)


def test_measure_psf():
    movie = storm_analysis.getData("test/data/test_spliner.dax")
    mlist = storm_analysis.getData("test/data/test_spliner_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf.psf")
    storm_analysis.removeFile(psf)

    from storm_analysis.spliner.measure_psf import measurePSF
    measurePSF(movie, "", mlist, psf)


def test_scmos_2d_fixed():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_sc_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_sc_2d_fixed.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.sCMOS.scmos_analysis import analyze
    analyze(movie_name, mlist, settings)


def create2DSpline():
    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf_2d.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf_2d.spline")

    storm_analysis.removeFile(psf)
    storm_analysis.removeFile(spline)

    measurePSF.measurePSF(movie, "", mlist, psf, want2d=True, aoi_size=5)
    psfToSpline.psfToSpline(psf, spline, 4)
def create3DSpline():
    movie = storm_analysis.getData("test/data/test_spliner.dax")
    mlist = storm_analysis.getData("test/data/test_spliner_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf.spline")

    storm_analysis.removeFile(psf)
    storm_analysis.removeFile(spline)

    measurePSF.measurePSF(movie, "", mlist, psf, aoi_size=6)
    psfToSpline.psfToSpline(psf, spline, 5)
def test_l1h():

    # Test L1H.
    movie_name = storm_analysis.getData("test/data/test_l1h.dax")
    settings = storm_analysis.getData("test/data/test_l1h.xml")
    hres = storm_analysis.getPathOutputTest("test_l1h_list.hres")
    mlist = storm_analysis.getPathOutputTest("test_l1h_list.bin")

    storm_analysis.removeFile(hres)
    storm_analysis.removeFile(mlist)

    from storm_analysis.L1H.cs_analysis import analyze
    analyze(movie_name, settings, hres, mlist)


def test_3ddao_2d():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1970):
        raise Exception("3D-DAOSTORM 2D did not find the expected number of localizations.")


def test_scmos_Z():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_sc_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_sc_Z.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.sCMOS.scmos_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1942):
        raise Exception("sCMOS Z did not find the expected number of localizations.")


def test_scmos_Z():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_sc_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_sc_Z.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.sCMOS.scmos_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1958):
        raise Exception("sCMOS Z did not find the expected number of localizations.")


def test_tracker():

    # Test tracking.
    import shutil

    settings = storm_analysis.getData("test/data/test_drift.xml")
    alist_name = storm_analysis.getPathOutputTest("test_drift_alist.bin")

    # Copy mlist so that it is in the same directory as alist.
    mlist_data = storm_analysis.getData("test/data/test_drift_mlist.bin")
    mlist_output = storm_analysis.getPathOutputTest("test_drift_mlist.bin")
    shutil.copyfile(mlist_data, mlist_output)

    from storm_analysis.sa_utilities.track_average_correct import trackAverageCorrect
    trackAverageCorrect(mlist_output, alist_name, settings)
def test_simulate_3():
    """
    No photo-physics, spline PSF, sCMOS camera.
    """
    dax_name = storm_analysis.getPathOutputTest("test_sim3.dax")
    bin_name = storm_analysis.getData("test/data/test_sim_olist.bin")
    cal_name = storm_analysis.getData("test/data/calib.npy")
    spline_name = storm_analysis.getData("test/data/test_spliner_psf.spline")

    sim = simulate.Simulate(lambda settings, xs, ys, i3data : background.UniformBackground(settings, xs, ys, i3data, photons = 20),
                            lambda settings, xs, ys, i3data : camera.SCMOS(settings, xs, ys, i3data, 100.0, cal_name),
                            lambda settings, xs, ys, i3data : photophysics.AlwaysOn(settings, xs, ys, i3data, 2000.0),
                            lambda settings, xs, ys, i3data : psf.Spline(settings, xs, ys, i3data, 160.0, spline_name))
    sim.simulate(dax_name, bin_name, 5)


def test_3ddao_Z():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_Z.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1955):
        raise Exception("3D-DAOSTORM Z did not find the expected number of localizations.")


def test_wavelet_bgr():
    movie_in = storm_analysis.getData("test/data/test_bg_sub.dax")
    movie_out = storm_analysis.getPathOutputTest("test_bg_sub_wbgr.dax")

    from storm_analysis.wavelet_bgr.wavelet_bgr import waveletBGRSub
    waveletBGRSub(movie_in, movie_out, "db4", 2, 2, 10)


def test_scmos_scmos_cal():
    """
    Test that scmos calibration data is initialized correctly.
    """
    settings = storm_analysis.getData("test/data/test_sc_2d_fixed.xml")
    parameters = params.ParametersSCMOS().initFromFile(settings)

    # Create analysis object and reach deep into it..
    find_fit = findPeaks.initFindAndFit(parameters)
    fitter = find_fit.peak_fitter
    mfitter = fitter.mfitter

    # Get sCMOS calibration value.
    scmos_cal_value = mfitter.scmos_cal[0, 0]

    # Initialize with an image.
    image = numpy.ones(mfitter.scmos_cal.shape)
    fitter.newImage(image)

    # Verify that the image has the sCMOS term added.
    resp = mfitter.getResidual()
    assert (numpy.max(resp - (1.0 + scmos_cal_value)) < 1.0e-6)

    # Cleanup.
    fitter.cleanUp()


def test_rolling_ball():
    movie_in = storm_analysis.getData("test/data/test_bg_sub.dax")
    movie_out = storm_analysis.getPathOutputTest("test_bg_sub_rb.dax")

    from storm_analysis.rolling_ball_bgr.rolling_ball import rollingBallSub
    rollingBallSub(movie_in, movie_out, 10, 1)
def test_dbscan_clustering():

    # Test dbscan
    import shutil

    # Copy alist to the output directory so that the DBSCAN results end up in the right place.
    alist_data = storm_analysis.getData("test/data/test_clustering_list.bin")
    alist_output = storm_analysis.getPathOutputTest("test_clustering_alist.bin")
    shutil.copyfile(alist_data, alist_output)

    from storm_analysis.dbscan.dbscan_analysis import dbscanAnalysis
    dbscanAnalysis(alist_output, 0)

    # Verify number of clusters found.
    stats_file = storm_analysis.getPathOutputTest("test_clustering_aclusters_stats.txt")
    n_clusters = len(open(stats_file).readlines())
    if (n_clusters != 99):
        raise Exception("DBSCAN did not identify the expected number of clusters.")

    # Make pictures.
    clist_name = storm_analysis.getPathOutputTest("test_clustering_aclusters_size_list.bin")
    image_name = storm_analysis.getPathOutputTest("test_clustering_db")

    from storm_analysis.dbscan.cluster_images import clusterImages
    clusterImages(clist_name, "DBSCAN Clustering", 50, 20, image_name, [256, 256])


def test_simulate_2():
    """
    (Simple) STORM photo-physics, pure astigmatism PSF, EMCCD camera.
    """
    dax_name = storm_analysis.getPathOutputTest("test_sim2.dax")
    bin_name = storm_analysis.getData("test/data/test_sim.hdf5")

    sim = simulate.Simulate(background_factory=lambda settings, xs, ys, i3data: background.UniformBackground(settings, xs, ys, i3data, photons=20),
                            camera_factory=lambda settings, xs, ys, i3data: camera.EMCCD(settings, xs, ys, i3data, 100.0, emccd_gain=5.0, preamp_gain=1.0, read_noise=5),
                            photophysics_factory=lambda settings, xs, ys, i3data: photophysics.SimpleSTORM(settings, xs, ys, i3data, 4000.0, off_time=10.0),
                            psf_factory=lambda settings, xs, ys, i3data: psf.PupilFunction(settings, xs, ys, i3data, 160.0, [[1.3, 2, 2]]),
                            x_size=100,
                            y_size=75)
    sim.simulate(dax_name, bin_name, 5)


def test_fitz_c_5():
    """
    Test that fitz_c.wXwYCurveDistance works correctly.
    """
    # Load 3D parameters.
    settings = storm_analysis.getData("test/data/test_3d_3d.xml")
    parameters = params.ParametersDAO().initFromFile(settings)
    [wx_params, wy_params] = parameters.getWidthParams()
    [min_z, max_z] = parameters.getZRange()
    pixel_size = parameters.getAttr("pixel_size")

    # Calculate widths.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    [sx, sy] = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # Distances should be very close to zero.
    dist = fitzC.wXwYCurveDistance(wx_params, wy_params, 2.0*sx, 2.0*sy, min_z, max_z, 0.001)
    assert numpy.allclose(dist, numpy.zeros(sx.size))

    # First distance should be larger.
    sx[0] += 10.0
    dist = fitzC.wXwYCurveDistance(wx_params, wy_params, 2.0*sx, 2.0*sy, min_z, max_z, 0.001)
    expected = numpy.zeros(sx.size)
    expected[0] = 0.0345862
    assert numpy.allclose(dist, expected)
def test_frc():
    mlist_name = storm_analysis.getData("test/data/test_drift_mlist.bin")
    results_name = storm_analysis.getPathOutputTest("test_drift_frc.txt")

    from storm_analysis.frc.frc_calc2d import frcCalc2d
    frcCalc2d(mlist_name, results_name, False)


def test_3ddao_2d_fixed_gt_text():
    """
    Start fitting from ground truth locations (text file version).
    """
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed_gt_text.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_fixed_gt_text.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 200):
        raise Exception("3D-DAOSTORM 2D fixed ground truth did not find the expected number of localizations.")


def test_3ddao_2d_fixed_non_square():
    movie_name = storm_analysis.getData("test/data/test_300x200.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_300x200.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 991):
        raise Exception("3D-DAOSTORM 2D fixed non square did not find the expected number of localizations.")


def test_voronoi_clustering():

    # Test voronoi
    alist_name = storm_analysis.getData("test/data/test_clustering_list.bin")
    output_dir = storm_analysis.getPathOutputTest("./")

    from storm_analysis.voronoi.voronoi_analysis import voronoiAnalysis
    voronoiAnalysis(alist_name, 0.1, output_dir)

    # Verify number of clusters found.
    stats_file = storm_analysis.getPathOutputTest("test_clustering_srt_stats.txt")
    n_clusters = len(open(stats_file).readlines())
    if (n_clusters != 100):
        raise Exception("Voronoi did not identify the expected number of clusters.")

    # Make pictures.
    clist_name = storm_analysis.getPathOutputTest("test_clustering_srt_size_list.bin")
    image_name = storm_analysis.getPathOutputTest("test_clustering_vr")

    from storm_analysis.dbscan.cluster_images import clusterImages
    clusterImages(clist_name, "Voronoi Clustering", 50, 20, image_name, [256, 256])


def test_rcc():
    mlist_name = storm_analysis.getData("test/data/test_drift_mlist.bin")
    drift_name = storm_analysis.getPathOutputTest("test_drift.txt")

    from storm_analysis.rcc.rcc_drift_correction import rccDriftCorrection
    rccDriftCorrection(mlist_name, drift_name, 2000, 1, True, False)
def test_scmos_3d():
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_sc_3d.xml")
    mlist = storm_analysis.getPathOutputTest("test_sc_3d.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.sCMOS.scmos_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1950):
        raise Exception("sCMOS 3D did not find the expected number of localizations.")

    # Verify that the Z values actually got calculated.
    if not veri.verifyZWasCalculated(mlist):
        raise Exception("Z values were not calculated for sCMOS 3D fitting.")
def test_spliner_std_2D():

    # Only test for Python3 due to pickle incompatibility issues.
    if (sys.version_info < (3, 0)):
        return

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_spliner_2D.xml")
    mlist = storm_analysis.getPathOutputTest("test_spliner_2D.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.spliner.spline_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 2004):
        raise Exception("Spliner 2D did not find the expected number of localizations.")


def test_get_help_1():
    """
    Test getting help.
    """
    # Load some parameters.
    original = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    p1 = params.ParametersDAO().initFromFile(original, warnings=True)

    v1 = p1.helpAttr("max_frame")
    v1 = p1.helpAttr("convert_to")
def test_l1h():

    # Test L1H.
    movie_name = storm_analysis.getData("test/data/test_l1h.dax")
    settings = storm_analysis.getData("test/data/test_l1h.xml")
    hres = storm_analysis.getPathOutputTest("test_l1h_list.hres")
    mlist = storm_analysis.getPathOutputTest("test_l1h_list.bin")

    storm_analysis.removeFile(hres)
    storm_analysis.removeFile(mlist)

    from storm_analysis.L1H.cs_analysis import analyze
    analyze(movie_name, settings, hres, mlist)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1986):
        raise Exception("L1H did not find the expected number of localizations.")


def test_get_attr_3():
    """
    Test getting parameters.
    """
    # Load some parameters.
    original = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    p1 = params.ParametersDAO().initFromFile(original, warnings=True)

    v1 = p1.getAttr("start_frame")
    v1 = p1.getAttr("x_start", 1)
def test_pretty_print_1():

    # This just tests that it doesn't fail. It does not check the
    # formatting of the output file.
    original = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    output = storm_analysis.getPathOutputTest("test_pp1.xml")

    # Load parameters.
    p1 = params.ParametersDAO().initFromFile(original, warnings=True)

    # Convert back to XML.
    p1.toXMLFile(output, pretty=True)


def _test_l1h():

    # Test L1H.
    movie_name = storm_analysis.getData("test/data/test_l1h.dax")
    settings = storm_analysis.getData("test/data/test_l1h.xml")
    hres = storm_analysis.getPathOutputTest("test_l1h_list.hres")
    mlist = storm_analysis.getPathOutputTest("test_l1h_list.bin")

    storm_analysis.removeFile(hres)
    storm_analysis.removeFile(mlist)

    from storm_analysis.L1H.cs_analysis import analyze
    analyze(movie_name, settings, hres, mlist)

    # Verify number of localizations found.
    #
    # FIXME: Change L1H to use the HDF5 format.
    #
    num_locs = readinsight3.loadI3File(mlist)["x"].size
    if not veri.verifyIsCloseEnough(num_locs, 1986):
        raise Exception("L1H did not find the expected number of localizations.")
def _test_rcc_1():
    """
    Test RCC drift correction.
    """
    # Calculate drift correction.
    param_name = storm_analysis.getData("test/data/test_drift.xml")
    parameters = params.ParametersCommon().initFromFile(param_name)

    data_name = storm_analysis.getData("test/data/test_drift.hdf5")
    h5_name = storm_analysis.getPathOutputTest("test_rcc_hdf5.hdf5")

    # Make a copy of the original as it will get modified, and otherwise
    # git would pick up the change.
    shutil.copyfile(data_name, h5_name)

    drift_output = storm_analysis.getPathOutputTest("test_drift_drift.txt")

    [min_z, max_z] = parameters.getZRange()
    rccDriftCorrection.rccDriftCorrection(h5_name,
                                          drift_output,
                                          parameters.getAttr("frame_step"),
                                          parameters.getAttr("d_scale"),
                                          min_z,
                                          max_z,
                                          True)

    # Verify results.
    diffs = veri.verifyDriftCorrection(storm_analysis.getData("test/data/test_drift.txt"), drift_output)

    if (diffs[0] > 0.1):
        raise Exception("Frame numbers do not match.")

    # These thresholds are somewhat arbitrary.
    if (diffs[1] > 0.1) or (diffs[2] > 0.1):
        raise Exception("XY drift correction error.")

    if (diffs[3] > 0.03):
        raise Exception("Z drift correction error.")
def test_rcc():

    # Calculate drift correction.
    mlist_name = storm_analysis.getData("test/data/test_drift_mlist.bin")
    drift_name = storm_analysis.getPathOutputTest("test_rcc_drift.txt")

    from storm_analysis.rcc.rcc_drift_correction import rccDriftCorrection
    rccDriftCorrection(mlist_name, drift_name, 1000, 1, True, False)

    # Verify results.
    diffs = veri.verifyDriftCorrection(storm_analysis.getData("test/data/test_drift.txt"), drift_name)

    if (diffs[0] > 0.1):
        raise Exception("Frame numbers do not match.")

    # These thresholds are somewhat arbitrary.
    if (diffs[1] > 0.1) or (diffs[2] > 0.1):
        raise Exception("XY drift correction error.")

    if (diffs[3] > 30.0):
        raise Exception("Z drift correction error.")


def test_voronoi_clustering():

    # Test voronoi
    alist_name = storm_analysis.getData("test/data/test_drift_alist.bin")
    output_dir = storm_analysis.getPathOutputTest("./")

    from storm_analysis.voronoi.voronoi_analysis import voronoiAnalysis
    voronoiAnalysis(alist_name, 1.25, output_dir)

    clist_name = storm_analysis.getPathOutputTest("test_drift_asrt_size_list.bin")
    image_name = storm_analysis.getPathOutputTest("test_drift_vr")

    from storm_analysis.dbscan.cluster_images import clusterImages
    clusterImages(clist_name, "Voronoi Clustering", 50, 20, image_name, [256, 256])


def test_simulate_1():
    """
    No photo-physics, simple PSF, ideal camera.
    """
    dax_name = storm_analysis.getPathOutputTest("test_sim1.dax")
    bin_name = storm_analysis.getData("test/data/test_sim.hdf5")

    sim = simulate.Simulate(background_factory = lambda settings, xs, ys, i3data : background.UniformBackground(settings, xs, ys, i3data),
                            camera_factory = lambda settings, xs, ys, i3data : camera.Ideal(settings, xs, ys, i3data, 100.0),
                            photophysics_factory = lambda settings, xs, ys, i3data : photophysics.AlwaysOn(settings, xs, ys, i3data, 1000.0),
                            psf_factory = lambda settings, xs, ys, i3data : psf.GaussianPSF(settings, xs, ys, i3data, 160.0),
                            x_size = 100,
                            y_size = 75)
    sim.simulate(dax_name, bin_name, 5)
def test_get_help_2():
    """
    Test getting help with a parameter that does not exist.
    """
    # Load some parameters.
    original = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    p1 = params.ParametersDAO().initFromFile(original, warnings=True)

    try:
        v1 = p1.helpAttr("foo")
    except params.ParametersException:
        return
    assert False


def test_get_attr_2():
    """
    Test no such parameter with default value.
    """
    # Load some parameters.
    original = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    p1 = params.ParametersDAO().initFromFile(original, warnings=True)

    try:
        v1 = p1.getAttr("foo", default="bar")
    except params.ParametersException:
        return
    assert False


def test_psf_spline3D_1():
    """
    Test that spline PSF agrees with spliner (for 0.0 offset).
    """
    # Only test for Python3 due to pickle incompatibility issues.
    if (sys.version_info < (3, 0)):
        return

    spline_name = storm_analysis.getData("test/data/test_spliner_psf.spline")

    psf_sp_3d = psf.Spline3D(spline_name)
    sp_3d = splineToPSF.SplineToPSF3D(spline_name)

    psf_im = psf_sp_3d.getPSF(0.1, 0.0, 0.0)
    sp_im = sp_3d.getPSF(0.1, normalize = False)

    assert numpy.allclose(psf_im, sp_im)


def test_dbscan_clustering():

    # Test dbscan
    import shutil

    # Copy alist to the output directory so that the DBSCAN results end up in the right place.
    alist_data = storm_analysis.getData("test/data/test_drift_alist.bin")
    alist_output = storm_analysis.getPathOutputTest("test_drift_alist.bin")
    shutil.copyfile(alist_data, alist_output)

    from storm_analysis.dbscan.dbscan_analysis import dbscanAnalysis
    dbscanAnalysis(alist_output, 0)

    clist_name = storm_analysis.getPathOutputTest("test_drift_aclusters_size_list.bin")
    image_name = storm_analysis.getPathOutputTest("test_drift_db")

    from storm_analysis.dbscan.cluster_images import clusterImages
    clusterImages(clist_name, "DBSCAN Clustering", 50, 20, image_name, [256, 256])


def test_fitz_c_4():
    """
    Test that tracks with wx, wy values that are not near the calibration
    curve are assigned z values less than z minimum. Their category remains
    unchanged as this is done in a separate step.
    """
    # Load 3D parameters.
    settings = storm_analysis.getData("test/data/test_3d_3d.xml")
    parameters = params.ParametersDAO().initFromFile(settings)
    [wx_params, wy_params] = parameters.getWidthParams()
    [min_z, max_z] = parameters.getZRange()
    pixel_size = parameters.getAttr("pixel_size")

    # Calculate widths.
    z_vals = numpy.arange(-250.0, 251.0, 50)
    [sx, sy] = fitzC.calcSxSy(wx_params, wy_params, z_vals)

    # Create HDF5 file with these widths.
    track_length = numpy.ones(sx.size)
    track_length[:2] = 2
    tracks = {"category" : numpy.ones(sx.size, dtype = numpy.int32),
              "track_length" : track_length,
              "x" : numpy.zeros(sx.size),
              "xsigma" : track_length*(sx/pixel_size + numpy.ones(sx.size)),
              "ysigma" : track_length*(sy/pixel_size + numpy.ones(sx.size))}

    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)

    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(pixel_size)
        h5.addTracks(tracks)

    # Calculate Z values.
    fitzC.fitzTracks(h5_name, 1.5, wx_params, wy_params, min_z, max_z, 1.0e-3)

    # Check Z values.
    with saH5Py.SAH5Py(h5_name) as h5:
        for tracks in h5.tracksIterator():
            assert(numpy.allclose(tracks["z"], min_z*numpy.ones(sx.size)-1.0e-3))
            assert(numpy.allclose(tracks["category"], numpy.ones(sx.size)))


def test_psf_2D_dy():

    # Only test for Python3 due to pickle incompatibility issues.
    if (sys.version_info < (3, 0)):
        return

    spline_filename = storm_analysis.getData("test/data/test_spliner_psf_2d.spline")
    with open(spline_filename, "rb") as fp:
        spline_data = pickle.load(fp)

    py_spline = spline2D.Spline2D(spline_data["spline"], spline_data["coeff"])
    c_spline = cubicSplineC.CSpline2D(py_spline)

    size = py_spline.getSize() - 1.0e-6
    for i in range(reps):
        x = random.uniform(1.0e-6, size)
        y = random.uniform(1.0e-6, size)
        #print("{0:.3f} {1:.3f}".format(py_spline.dyf(x, y), c_spline.dyf(x, y)))
        assert (abs(py_spline.dyf(x, y) - c_spline.dyf(x, y)) < 1.0e-6)


def test_std_analysis_1():
    """
    Test zCheck.
    """
    # Load 3D parameters.
    settings = storm_analysis.getData("test/data/test_3d_3d.xml")
    parameters = params.ParametersDAO().initFromFile(settings)
    [min_z, max_z] = parameters.getZRange()
    assert(abs(min_z + 0.5) < 1.0e-6)
    assert(abs(max_z - 0.5) < 1.0e-6)

    # Create HDF5 file with localizations and tracks.
    zvals = numpy.arange(-1.0, 1.05, 0.2)
    peaks = {"category" : numpy.ones(zvals.size, dtype = numpy.int32),
             "x" : numpy.zeros(zvals.size),
             "z" : zvals}

    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)

    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.addLocalizations(peaks, 1)
        h5.addTracks(peaks)

    # Run z check on the file.
    stdAnalysis.zCheck(h5_name, parameters)

    # Check track and localization categories.
    category = numpy.ones(zvals.size, dtype = numpy.int32)
    z_mask = (zvals < min_z) | (zvals > max_z)
    category[z_mask] = 9

    with saH5Py.SAH5Py(h5_name) as h5:
        for fnum, locs in h5.localizationsIterator(fields = ["category"]):
            assert(numpy.allclose(locs["category"], category))
        for tracks in h5.tracksIterator(fields = ["category"]):
            assert(numpy.allclose(tracks["category"], category))
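

# A minimal, optional runner sketch. The tests above are normally collected by
# a test runner (e.g. pytest or nose); the block below only shows how a few of
# them could be called directly. The particular selection is arbitrary and it
# assumes the storm-analysis test data referenced by storm_analysis.getData()
# is available.
if (__name__ == "__main__"):
    test_drift_correction()
    test_3ddao_2d()
    test_scmos_3d()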