def analyzeData():
    """Run spliner analysis on every 'test*' directory and record timings.

    For each directory matching 'test*' (in sorted order) this removes any
    stale 'test.hdf5' result, runs spliner analysis on 'test.tif' with the
    'spliner.xml' settings file, prints the elapsed time and writes it to
    'timing.txt' in that directory. A final summary line reports the total,
    matching the fdecon variant of this benchmark script.
    """
    dirs = sorted(glob.glob("test*"))
    total_time = 0.0
    for a_dir in dirs:
        print()
        print("Analyzing:", a_dir)
        print()

        mlist = a_dir + "/test.hdf5"

        # Remove stale results, if any.
        if os.path.exists(mlist):
            os.remove(mlist)

        # Run analysis.
        start_time = time.time()
        sp_ana.analyze(a_dir + "/test.tif", mlist, "spliner.xml")
        stop_time = time.time()

        # Save timing results.
        elapsed = stop_time - start_time
        total_time += elapsed
        print("Analysis completed in {0:.2f} seconds".format(elapsed))
        with open(a_dir + "/timing.txt", "w") as fp:
            fp.write(str(elapsed) + "\n")

    # Aggregate summary, consistent with the other analyzeData variant.
    print()
    print("{0:d} directories analyzed in {1:.2f} seconds.".format(len(dirs), total_time))
def analyzeData():
    """Analyze every 'test*' directory with the fdecon settings, timing each run.

    Each directory gets any stale 'test.hdf5' removed, 'test.dax' analyzed
    with 'fdecon.xml', the elapsed time printed and written to 'timing.txt'.
    A summary line with the directory count and total time is printed last.
    """
    test_dirs = sorted(glob.glob("test*"))
    elapsed_total = 0.0
    for directory in test_dirs:
        print()
        print("Analyzing:", directory)
        print()

        h5_name = directory + "/test.hdf5"

        # Clear out any previous results.
        if os.path.exists(h5_name):
            os.remove(h5_name)

        # Time the analysis run.
        t_begin = time.time()
        sp_ana.analyze(directory + "/test.dax", h5_name, "fdecon.xml")
        t_end = time.time()

        # Report and persist the per-directory timing.
        elapsed = t_end - t_begin
        elapsed_total += elapsed
        print("Analysis completed in {0:.2f} seconds".format(elapsed))
        with open(directory + "/timing.txt", "w") as fp:
            fp.write(str(elapsed) + "\n")

    print()
    print("{0:d} directories analyzed in {1:.2f} seconds.".format(len(test_dirs), elapsed_total))
def test_spliner_std_2D():
    """Standard 2D spliner analysis test; verifies the localization count."""
    # Splines are pickled; Python 2 cannot read Python 3 pickles, so this
    # test only runs under Python 3.
    if sys.version_info < (3, 0):
        return

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_spliner_2D.xml")
    mlist = storm_analysis.getPathOutputTest("test_spliner_2D.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.spliner.spline_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Check that the expected number of localizations was found.
    n_found = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(n_found, 2004):
        raise Exception(
            "Spliner 2D did not find the expected number of localizations.")