Example #1
0
def test_IO_obj_dict(filename):
    """
    test obj_dict hardlink capability
    """

    from etrack.reconstruction import evaluation

    # Case 1: one object that refers to itself through its parent attribute.
    results = evaluation.generate_random_alg_results(length=10000)
    results.parent = [results]
    with h5py.File(filename, 'w') as h5file:
        write_object_to_hdf5(results, h5file, 'alg_results')
    with h5py.File(filename, 'r') as h5file:
        loaded = read_object_from_hdf5(h5file['alg_results'])
    # the self-reference must come back as the same python object, not a copy
    assert loaded['parent'][0] is loaded
    os.remove(filename)

    # Case 2: after adding default uncertainties, alpha_unc and beta_unc
    # should alias the entries of uncertainty_list on read-back.
    results.add_default_uncertainties()
    results.parent = [results]
    with h5py.File(filename, 'w') as h5file:
        write_object_to_hdf5(results, h5file, 'alg_results')
    with h5py.File(filename, 'r') as h5file:
        loaded = read_object_from_hdf5(h5file['alg_results'])
    assert loaded['parent'][0] is loaded
    assert loaded['alpha_unc'] is loaded['uncertainty_list'][0]
    assert loaded['beta_unc'] is loaded['uncertainty_list'][1]
    os.remove(filename)

    # Case 3: several objects written in one file session share pyobj_to_h5,
    # so writing the same python object twice should produce a hardlink.
    first = evaluation.generate_random_alg_results(length=10000)
    first.add_default_uncertainties()
    second = evaluation.generate_random_alg_results(length=1000)
    second.add_default_uncertainties()

    pyobj_to_h5 = {}
    with h5py.File(filename, 'a') as h5file:
        write_object_to_hdf5(first, h5file, 'ar1', pyobj_to_h5=pyobj_to_h5)
        write_object_to_hdf5(second, h5file, 'ar2', pyobj_to_h5=pyobj_to_h5)
        # same python object again: should be hardlinked, not copied
        write_object_to_hdf5(first, h5file, 'ar3', pyobj_to_h5=pyobj_to_h5)

    h5_to_pydict = {}
    with h5py.File(filename, 'r') as h5file:
        read1 = read_object_from_hdf5(h5file['ar1'], h5_to_pydict=h5_to_pydict)
        read2 = read_object_from_hdf5(h5file['ar2'], h5_to_pydict=h5_to_pydict)
        read3 = read_object_from_hdf5(h5file['ar3'], h5_to_pydict=h5_to_pydict)
    check_alg_results_IO(read1, first, uncertainty_flag=True)
    check_alg_results_IO(read2, second, uncertainty_flag=True)
    check_alg_results_IO(read3, first, uncertainty_flag=True)
    # hardlinked groups must resolve to a single python object when reading
    assert read1 is read3
    os.remove(filename)
Example #2
0
def test_IO_obj_dict(filename):
    """
    test obj_dict hardlink capability
    """

    from etrack.reconstruction import evaluation

    # A self-referencing object: obj_dict should preserve identity on read.
    ar = evaluation.generate_random_alg_results(length=10000)
    ar.parent = [ar]
    with h5py.File(filename, 'w') as f:
        write_object_to_hdf5(ar, f, 'alg_results')
    with h5py.File(filename, 'r') as f:
        rt = read_object_from_hdf5(f['alg_results'])
    assert rt['parent'][0] is rt
    os.remove(filename)

    # With default uncertainties attached, alpha_unc and beta_unc must
    # alias the corresponding uncertainty_list entries after the round trip.
    ar.add_default_uncertainties()
    ar.parent = [ar]
    with h5py.File(filename, 'w') as f:
        write_object_to_hdf5(ar, f, 'alg_results')
    with h5py.File(filename, 'r') as f:
        rt = read_object_from_hdf5(f['alg_results'])
    assert rt['parent'][0] is rt
    assert rt['alpha_unc'] is rt['uncertainty_list'][0]
    assert rt['beta_unc'] is rt['uncertainty_list'][1]
    os.remove(filename)

    # Multiple objects written within one file session share pyobj_to_h5;
    # repeating an object should therefore store a hardlink.
    obj_a = evaluation.generate_random_alg_results(length=10000)
    obj_a.add_default_uncertainties()
    obj_b = evaluation.generate_random_alg_results(length=1000)
    obj_b.add_default_uncertainties()

    pyobj_to_h5 = {}
    with h5py.File(filename, 'a') as f:
        write_object_to_hdf5(obj_a, f, 'ar1', pyobj_to_h5=pyobj_to_h5)
        write_object_to_hdf5(obj_b, f, 'ar2', pyobj_to_h5=pyobj_to_h5)
        # should be hardlinked
        write_object_to_hdf5(obj_a, f, 'ar3', pyobj_to_h5=pyobj_to_h5)

    h5_to_pydict = {}
    with h5py.File(filename, 'r') as f:
        got_a = read_object_from_hdf5(f['ar1'], h5_to_pydict=h5_to_pydict)
        got_b = read_object_from_hdf5(f['ar2'], h5_to_pydict=h5_to_pydict)
        got_c = read_object_from_hdf5(f['ar3'], h5_to_pydict=h5_to_pydict)
    check_alg_results_IO(got_a, obj_a, uncertainty_flag=True)
    check_alg_results_IO(got_b, obj_b, uncertainty_flag=True)
    check_alg_results_IO(got_c, obj_a, uncertainty_flag=True)
    # identical source object -> identical python object after reading
    assert got_a is got_c
    os.remove(filename)
Example #3
0
    def test_IO_user_objects(filename):
        """
        single user object
        multi-level user objects
        """

        # don't import at top of file! circular import with evaluation.py
        from etrack.reconstruction import evaluation

        # flat case: a single user-defined object with no nested children
        original = evaluation.generate_random_alg_results(length=10000)
        with h5py.File(filename, 'w') as h5file:
            write_object_to_hdf5(original, h5file, 'alg_results')
        with h5py.File(filename, 'r') as h5file:
            restored = read_object_from_hdf5(h5file['alg_results'])
        check_alg_results_IO(restored, original, uncertainty_flag=False)
        os.remove(filename)

        # nested case: the same object after attaching uncertainty children
        original.add_default_uncertainties()
        with h5py.File(filename, 'w') as h5file:
            write_object_to_hdf5(original, h5file, 'alg_results')
        with h5py.File(filename, 'r') as h5file:
            restored = read_object_from_hdf5(h5file['alg_results'])
        check_alg_results_IO(restored, original, uncertainty_flag=True)
        os.remove(filename)
Example #4
0
    def test_IO_user_objects(filename):
        """
        single user object
        multi-level user objects
        """

        # don't import at top of file! circular import with evaluation.py
        from etrack.reconstruction import evaluation

        # round-trip a single user-defined object (no uncertainties yet)
        src = evaluation.generate_random_alg_results(length=10000)
        with h5py.File(filename, 'w') as f:
            write_object_to_hdf5(src, f, 'alg_results')
        with h5py.File(filename, 'r') as f:
            result = read_object_from_hdf5(f['alg_results'])
        check_alg_results_IO(result, src, uncertainty_flag=False)
        os.remove(filename)

        # round-trip again once the object holds nested uncertainty objects
        src.add_default_uncertainties()
        with h5py.File(filename, 'w') as f:
            write_object_to_hdf5(src, f, 'alg_results')
        with h5py.File(filename, 'r') as f:
            result = read_object_from_hdf5(f['alg_results'])
        check_alg_results_IO(result, src, uncertainty_flag=True)
        os.remove(filename)
Example #5
0
def test_IO_overwrite(filename):
    """
    test the ability to overwrite objects in an existing HDF5 file
    """

    from etrack.reconstruction import evaluation

    # Simple overwrite: write the same object under the same name in two
    # separate append-mode sessions; the read-back should still match.
    obj = evaluation.generate_random_alg_results(length=10000)
    obj.add_default_uncertainties()
    for _ in range(2):
        with h5py.File(filename, 'a') as h5file:
            write_object_to_hdf5(obj, h5file, 'alg_results')
    with h5py.File(filename, 'r') as h5file:
        readback = read_object_from_hdf5(h5file['alg_results'])
    check_alg_results_IO(readback, obj, uncertainty_flag=True)
    os.remove(filename)

    # Two distinct objects stored side by side in one file.
    ar1 = evaluation.generate_random_alg_results(length=10000)
    ar1.add_default_uncertainties()
    ar2 = evaluation.generate_random_alg_results(length=1000)
    ar2.add_default_uncertainties()
    with h5py.File(filename, 'a') as h5file:
        write_object_to_hdf5(ar1, h5file, 'ar1')
        write_object_to_hdf5(ar2, h5file, 'ar2')
    with h5py.File(filename, 'r') as h5file:
        first_read = read_object_from_hdf5(h5file['ar1'])
        second_read = read_object_from_hdf5(h5file['ar2'])
    check_alg_results_IO(first_read, ar1, uncertainty_flag=True)
    check_alg_results_IO(second_read, ar2, uncertainty_flag=True)

    # Replace only 'ar1'; 'ar2' must be left untouched.
    ar3 = evaluation.generate_random_alg_results(length=1000)
    ar3.add_default_uncertainties()
    with h5py.File(filename, 'a') as h5file:
        write_object_to_hdf5(ar3, h5file, 'ar1')
    with h5py.File(filename, 'r') as h5file:
        first_read = read_object_from_hdf5(h5file['ar1'])
        second_read = read_object_from_hdf5(h5file['ar2'])
    check_alg_results_IO(first_read, ar3, uncertainty_flag=True)
    check_alg_results_IO(second_read, ar2, uncertainty_flag=True)
    os.remove(filename)
Example #6
0
def test_IO_overwrite(filename):
    """
    test the ability to overwrite objects in an existing HDF5 file
    """

    from etrack.reconstruction import evaluation

    # write the same object under the same name twice ('a' mode keeps the
    # file between sessions), then confirm the read-back still matches
    base_obj = evaluation.generate_random_alg_results(length=10000)
    base_obj.add_default_uncertainties()
    with h5py.File(filename, 'a') as f:
        write_object_to_hdf5(base_obj, f, 'alg_results')
    with h5py.File(filename, 'a') as f:
        write_object_to_hdf5(base_obj, f, 'alg_results')
    with h5py.File(filename, 'r') as f:
        rt = read_object_from_hdf5(f['alg_results'])
    check_alg_results_IO(rt, base_obj, uncertainty_flag=True)
    os.remove(filename)

    # two independent objects written into the same file
    big = evaluation.generate_random_alg_results(length=10000)
    big.add_default_uncertainties()
    small = evaluation.generate_random_alg_results(length=1000)
    small.add_default_uncertainties()
    with h5py.File(filename, 'a') as f:
        write_object_to_hdf5(big, f, 'ar1')
        write_object_to_hdf5(small, f, 'ar2')
    with h5py.File(filename, 'r') as f:
        rt_big = read_object_from_hdf5(f['ar1'])
        rt_small = read_object_from_hdf5(f['ar2'])
    check_alg_results_IO(rt_big, big, uncertainty_flag=True)
    check_alg_results_IO(rt_small, small, uncertainty_flag=True)

    # overwrite only 'ar1'; 'ar2' should survive unchanged
    replacement = evaluation.generate_random_alg_results(length=1000)
    replacement.add_default_uncertainties()
    with h5py.File(filename, 'a') as f:
        write_object_to_hdf5(replacement, f, 'ar1')
    with h5py.File(filename, 'r') as f:
        rt_big = read_object_from_hdf5(f['ar1'])
        rt_small = read_object_from_hdf5(f['ar2'])
    check_alg_results_IO(rt_big, replacement, uncertainty_flag=True)
    check_alg_results_IO(rt_small, small, uncertainty_flag=True)
    os.remove(filename)
Example #7
0
def test_write_objects_to_hdf5():
    """
    test the multiple-object form of writing
    """

    from etrack.reconstruction import evaluation

    base = generate_random_filename(ext='')
    path_h5 = base + '.h5'
    path_hdf5 = base + '.hdf5'

    # open h5file handle, single keyword object
    # (the keyword name 'ar' becomes the HDF5 group name)
    single = evaluation.generate_random_alg_results(length=1000)
    single.add_default_uncertainties()
    with h5py.File(path_h5, 'a') as h5file:
        write_objects_to_hdf5(h5file, ar=single)
    with h5py.File(path_h5, 'r') as h5file:
        got = read_object_from_hdf5(h5file['ar'])
    check_alg_results_IO(got, single, uncertainty_flag=True)
    os.remove(path_h5)

    # open h5file handle, several keyword objects in one call
    res_a = evaluation.generate_random_alg_results(length=1000)
    res_a.add_default_uncertainties()
    res_b = evaluation.generate_random_alg_results(length=2000)
    res_b.add_default_uncertainties()
    res_c = evaluation.generate_random_alg_results(length=3000)
    res_c.add_default_uncertainties()
    with h5py.File(path_h5, 'a') as h5file:
        written = write_objects_to_hdf5(
            h5file, ar1=res_a, ar2=res_b, ar3=res_c, aunc=res_a.alpha_unc)
    assert written == path_h5

    h5_to_pydict = {}
    with h5py.File(path_h5, 'r') as h5file:
        got_a = read_object_from_hdf5(h5file['ar1'], h5_to_pydict=h5_to_pydict)
        got_b = read_object_from_hdf5(h5file['ar2'], h5_to_pydict=h5_to_pydict)
        got_c = read_object_from_hdf5(h5file['ar3'], h5_to_pydict=h5_to_pydict)
        got_unc = read_object_from_hdf5(
            h5file['aunc'], h5_to_pydict=h5_to_pydict)
    check_alg_results_IO(got_a, res_a, uncertainty_flag=True)
    check_alg_results_IO(got_b, res_b, uncertainty_flag=True)
    check_alg_results_IO(got_c, res_c, uncertainty_flag=True)
    # check hard link across multiple write calls (within a file session)
    assert got_unc is got_a['alpha_unc']
    os.remove(path_h5)

    # filename with the .h5 extension instead of an open handle
    written = write_objects_to_hdf5(path_h5, ar=single)
    assert written == path_h5
    with h5py.File(path_h5, 'r') as h5file:
        got = read_object_from_hdf5(h5file['ar'])
    check_alg_results_IO(got, single, uncertainty_flag=True)
    os.remove(path_h5)

    # filename with the .hdf5 extension
    written = write_objects_to_hdf5(path_hdf5, ar=single)
    assert written == path_hdf5
    with h5py.File(path_hdf5, 'r') as h5file:
        got = read_object_from_hdf5(h5file['ar'])
    check_alg_results_IO(got, single, uncertainty_flag=True)
    os.remove(path_hdf5)

    # extension-less filename: '.h5' should be appended automatically
    written = write_objects_to_hdf5(base, ar=single)
    assert written == path_h5
    with h5py.File(path_h5, 'r') as h5file:
        got = read_object_from_hdf5(h5file['ar'])
    check_alg_results_IO(got, single, uncertainty_flag=True)
    os.remove(path_h5)
Example #8
0
def test_write_objects_to_hdf5():
    """
    test the multiple-object form of writing
    """

    from etrack.reconstruction import evaluation

    fname = generate_random_filename(ext='')
    fname_h5 = fname + '.h5'
    fname_hdf5 = fname + '.hdf5'

    # open file handle + one keyword object ('ar' becomes the group name)
    obj = evaluation.generate_random_alg_results(length=1000)
    obj.add_default_uncertainties()
    with h5py.File(fname_h5, 'a') as f:
        write_objects_to_hdf5(f, ar=obj)
    with h5py.File(fname_h5, 'r') as f:
        rt = read_object_from_hdf5(f['ar'])
    check_alg_results_IO(rt, obj, uncertainty_flag=True)
    os.remove(fname_h5)

    # open file handle + several keyword objects in a single call
    one = evaluation.generate_random_alg_results(length=1000)
    one.add_default_uncertainties()
    two = evaluation.generate_random_alg_results(length=2000)
    two.add_default_uncertainties()
    three = evaluation.generate_random_alg_results(length=3000)
    three.add_default_uncertainties()
    with h5py.File(fname_h5, 'a') as f:
        out_name = write_objects_to_hdf5(
            f, ar1=one, ar2=two, ar3=three, aunc=one.alpha_unc)
    assert out_name == fname_h5

    h5_to_pydict = {}
    with h5py.File(fname_h5, 'r') as f:
        rt_one = read_object_from_hdf5(f['ar1'], h5_to_pydict=h5_to_pydict)
        rt_two = read_object_from_hdf5(f['ar2'], h5_to_pydict=h5_to_pydict)
        rt_three = read_object_from_hdf5(f['ar3'], h5_to_pydict=h5_to_pydict)
        rt_unc = read_object_from_hdf5(f['aunc'], h5_to_pydict=h5_to_pydict)
    check_alg_results_IO(rt_one, one, uncertainty_flag=True)
    check_alg_results_IO(rt_two, two, uncertainty_flag=True)
    check_alg_results_IO(rt_three, three, uncertainty_flag=True)
    # check hard link across multiple write calls (within a file session)
    assert rt_unc is rt_one['alpha_unc']
    os.remove(fname_h5)

    # filename variants: explicit '.h5', explicit '.hdf5', and no
    # extension (in which case '.h5' should be appended automatically)
    for target, expected in ((fname_h5, fname_h5),
                             (fname_hdf5, fname_hdf5),
                             (fname, fname_h5)):
        out_name = write_objects_to_hdf5(target, ar=obj)
        assert out_name == expected
        with h5py.File(expected, 'r') as f:
            rt = read_object_from_hdf5(f['ar'])
        check_alg_results_IO(rt, obj, uncertainty_flag=True)
        os.remove(expected)