Example #1
import glob
import shutil
import time

# Assumed alias for the 3D-DAOSTORM analysis module; the original snippet does
# not show its imports.
import storm_analysis.daostorm_3d.mufit_analysis as dao3d


def analyzeData():
    dirs = sorted(glob.glob("test*"))
    total_time = 0.0
    for a_dir in dirs:
        print()
        print("Analyzing:", a_dir)
        print()
    
        hdf5 = a_dir + "/test.hdf5"

        # Do tracking and drift correction using the ground truth positions.
        if True:
            shutil.copyfile(a_dir + "/test_ref.hdf5", hdf5)

        # Run analysis.
        #
        # This will just do the drift correction as it will see that the
        # movie has been completely analyzed.
        #
        start_time = time.time()
        dao3d.analyze(a_dir + "/test.dax", hdf5, "dao.xml")
        stop_time = time.time()

        # Save timing results.
        total_time += stop_time - start_time
        print("Analysis completed in {0:.2f} seconds.".format(stop_time - start_time))

        with open(a_dir + "/timing.txt", "w") as fp:
            fp.write(str(stop_time - start_time) + "\n")

    print()
    print("{0:d} directories analyzed in {1:.2f} seconds.".format(len(dirs), total_time))
Example #2
# Same imports as Example #1 (glob, shutil, time and the assumed dao3d alias).
def analyzeData():
    dirs = sorted(glob.glob("test*"))
    total_time = 0.0
    for a_dir in dirs:
        print()
        print("Analyzing:", a_dir)
        print()

        hdf5 = a_dir + "/test.hdf5"

        # Do tracking and drift correction using the ground truth positions.
        if True:
            shutil.copyfile(a_dir + "/test_ref.hdf5", hdf5)

        # Run analysis.
        #
        # This will just do the drift correction as it will see that the
        # movie has been completely analyzed.
        #
        start_time = time.time()
        dao3d.analyze(a_dir + "/test.dax", hdf5, "dao.xml")
        stop_time = time.time()

        # Save timing results.
        total_time += stop_time - start_time
        print("Analysis completed in {0:.2f} seconds.".format(stop_time -
                                                              start_time))

        with open(a_dir + "/timing.txt", "w") as fp:
            fp.write(str(stop_time - start_time) + "\n")

    print()
    print("{0:d} directories analyzed in {1:.2f} seconds.".format(
        len(dirs), total_time))
Example #3
import glob
import os
import shutil
import time

# Assumed alias for the 3D-DAOSTORM analysis module; the original snippet does
# not show its imports.
import storm_analysis.daostorm_3d.mufit_analysis as dao3d


def analyzeData():
    dirs = sorted(glob.glob("test*"))
    total_time = 0.0
    for a_dir in dirs:
        print()
        print("Analyzing:", a_dir)
        print()
    
        hdf5 = a_dir + "/test.hdf5"
    
        # Remove stale results, if any.
        if os.path.exists(hdf5):
            os.remove(hdf5)

        # Run analysis.
        start_time = time.time()
        dao3d.analyze(a_dir + "/test.dax", hdf5, "dao.xml")
        stop_time = time.time()

        # Save timing results.
        total_time += stop_time - start_time
        print("Analysis completed in {0:.2f} seconds.".format(stop_time - start_time))

        with open(a_dir + "/timing.txt", "w") as fp:
            fp.write(str(stop_time - start_time) + "\n")

    print()
    print("{0:d} directories analyzed in {1:.2f} seconds.".format(len(dirs), total_time))
Example #4
# Assumed module-level import for the test helpers (getData, getPathOutputTest,
# removeFile).
import storm_analysis


def test_3ddao_Z():

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_Z.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)
Example #5
# Same assumed import as Example #4 (import storm_analysis).
def test_3ddao_2d_fixed_non_square():

    movie_name = storm_analysis.getData("test/data/test_300x200.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_300x200.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)
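
These are plain pytest-style test functions; nothing in them depends on a test runner, so a single test can also be exercised by hand. A minimal sketch, assuming the snippet lives in an importable module with the imports noted above:

if __name__ == "__main__":
    # Any failure in the analysis surfaces as an exception.
    test_3ddao_2d_fixed_non_square()
    print("test_3ddao_2d_fixed_non_square completed.")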
Example #6
# Assumed module-level imports: the storm_analysis test helpers and the
# verification utilities aliased as veri.
import storm_analysis
import storm_analysis.test.verifications as veri


def test_3ddao_2d():

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1970):
        raise Exception("3D-DAOSTORM 2D did not find the expected number of localizations.")
Example #7
# Same assumed imports as Example #6 (storm_analysis and veri).
def test_3ddao_Z():

    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_Z.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_Z.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 1955):
        raise Exception(
            "3D-DAOSTORM Z did not find the expected number of localizations.")
Example #8
# Same assumed imports as Example #6 (storm_analysis and veri).
def test_3ddao_2d_fixed_non_square():

    movie_name = storm_analysis.getData("test/data/test_300x200.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_300x200.bin")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 991):
        raise Exception(
            "3D-DAOSTORM 2D fixed non square did not find the expected number of localizations."
        )
Example #9
# Same assumed imports as Example #6 (storm_analysis and veri).
def test_3ddao_2d_fixed_gt_text():
    """
    Start fitting from ground truth locations (text file version).
    """
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed_gt_text.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_fixed_gt_text.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 200):
        raise Exception("3D-DAOSTORM 2D fixed ground truth did not find the expected number of localizations.")    
Example #10
# Same assumed imports as Example #6 (storm_analysis and veri).
def test_3ddao_2d_fixed_gt_text():
    """
    Start fitting from ground truth locations (text file version).
    """
    movie_name = storm_analysis.getData("test/data/test.dax")
    settings = storm_analysis.getData("test/data/test_3d_2d_fixed_gt_text.xml")
    mlist = storm_analysis.getPathOutputTest("test_3d_2d_fixed_gt_text.hdf5")
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    # Verify number of localizations found.
    num_locs = veri.verifyNumberLocalizations(mlist)
    if not veri.verifyIsCloseEnough(num_locs, 200):
        raise Exception(
            "3D-DAOSTORM 2D fixed ground truth did not find the expected number of localizations."
        )
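
Examples #6 through #10 repeat the same four steps: resolve the test inputs, remove any old output, run analyze(), and check the localization count. A hedged sketch of a shared helper capturing that pattern (runDaoTest is an illustrative name; it assumes the storm_analysis and veri imports noted above and reuses only calls already shown in the examples):

def runDaoTest(movie, xml, output, expected_locs=None):
    # Run 3D-DAOSTORM on a test movie and optionally verify the result.
    movie_name = storm_analysis.getData(movie)
    settings = storm_analysis.getData(xml)
    mlist = storm_analysis.getPathOutputTest(output)
    storm_analysis.removeFile(mlist)

    from storm_analysis.daostorm_3d.mufit_analysis import analyze
    analyze(movie_name, mlist, settings)

    if expected_locs is not None:
        num_locs = veri.verifyNumberLocalizations(mlist)
        if not veri.verifyIsCloseEnough(num_locs, expected_locs):
            raise Exception("Did not find the expected number of localizations.")
    return mlist

# Example #6 then reduces to:
# runDaoTest("test/data/test.dax", "test/data/test_3d_2d.xml", "test_3d_2d.hdf5", 1970)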
Example #11
import glob
import shutil
import time

# Assumed alias for the 3D-DAOSTORM analysis module, as in Example #1.
import storm_analysis.daostorm_3d.mufit_analysis as dao3d

dirs = sorted(glob.glob("test*"))
total_time = 0.0
for a_dir in dirs:
    print()
    print("Analyzing:", a_dir)
    print()

    hdf5 = a_dir + "/test.hdf5"

    # Do tracking and drift correction using the ground truth positions.
    if True:
        shutil.copyfile(a_dir + "/test_ref.hdf5", hdf5)

    # Run analysis.
    start_time = time.time()
    dao3d.analyze(a_dir + "/test.dax", hdf5, "dao.xml")
    stop_time = time.time()

    # Save timing results.
    total_time += stop_time - start_time
    print("Analysis completed in {0:.2f} seconds.".format(stop_time -
                                                          start_time))

    with open(a_dir + "/timing.txt", "w") as fp:
        fp.write(str(stop_time - start_time) + "\n")

print()
print("{0:d} directories analyzed in {1:.2f} seconds.".format(
    len(dirs), total_time))
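
Because the loop in Example #11 assumes every test* directory already contains test.dax and test_ref.hdf5, a small self-contained pre-flight check (an illustrative addition, not part of the original script) can fail fast before a long analysis run starts:

import glob
import os
import sys

# Sanity-check the working directory before starting the timing run.
dirs = sorted(glob.glob("test*"))
if not dirs:
    sys.exit("No test* directories found in " + os.getcwd())
for a_dir in dirs:
    for needed in ("test.dax", "test_ref.hdf5"):
        if not os.path.exists(os.path.join(a_dir, needed)):
            sys.exit("Missing " + needed + " in " + a_dir)
print("Found {0:d} test directories.".format(len(dirs)))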