Esempio n. 1
0
class TestThreading(TestCase):
    """Concurrency tests for DatHDF: verifies that methods guarded by thread
    locks behave correctly when called in parallel, both across different dat
    instances and repeatedly on one shared instance."""
    # Class-body imports are unconventional, but they bind these names as
    # class attributes for the setup code and test methods below.
    from dat_analysis.dat_object.make_dat import DatHandler, get_dat, get_dats
    from dat_analysis.data_standardize.exp_specific.Feb21 import Feb21Exp2HDF
    from concurrent.futures import ThreadPoolExecutor

    # Location of fixture dats (relative to the tests' working directory).
    dat_dir = os.path.abspath('fixtures/dats/2021Feb')

    # Where to put outputs (i.e. DatHDFs)
    Testing_Exp2HDF = get_testing_Exp2HDF(dat_dir,
                                          output_dir,
                                          base_class=Feb21Exp2HDF)

    # Shared pool and fixtures built once at class-definition time:
    # six distinct dats, plus the first dat repeated 10x to force lock
    # contention on a single instance.
    pool = ThreadPoolExecutor(max_workers=5)
    different_dats = get_dats([717, 719, 720, 723, 724, 725],
                              exp2hdf=Testing_Exp2HDF)
    single_dat = different_dats[0]
    same_dats = [single_dat] * 10

    def test_threaded_manipulate_test(self):
        """Test that running multiple threads through a method which changes an instance attribute works with
        thread locks"""
        def threaded_manipulate_test(dat: DatHDF):
            # Presumably returns truthy when the instance attribute ends up
            # consistent under concurrent access (checked via all(rets) below).
            eq = dat._threaded_manipulate_test()
            return eq

        # Concurrent calls on six *different* dat instances.
        t1 = time.time()
        rets = list(
            self.pool.map(threaded_manipulate_test, self.different_dats))
        print(f'Time elapsed: {time.time()-t1:.2f}s, Returns = {rets}')
        self.assertTrue(all(rets))

        # Concurrent calls on the *same* dat instance: real lock contention.
        # Note: pool.map zips against same_dats (10 entries) here.
        t1 = time.time()
        rets = list(self.pool.map(threaded_manipulate_test, self.same_dats))
        print(f'Time elapsed: {time.time()-t1:.2f}s, Returns = {rets}')
        self.assertTrue(all(rets))

    def test_threaded_reentrant_test(self):
        """Test that the reentrant lock allows a recursive method call to work properly"""
        # Single-threaded sanity check first: starting from i=0 should
        # recurse up to 3 (per the assertion below).
        t1 = time.time()
        ret = self.single_dat._threaded_reentrant_test(i=0)
        print(f'Time elapsed: {time.time()-t1:.2f}s, Returns = {ret}')
        self.assertEqual(3, ret)

        def reentrant_test(dat: DatHDF, i):
            return dat._threaded_reentrant_test(i=i)

        # Different dats in parallel. From the expected values: i < 3
        # apparently recurses up to 3, while i >= 3 returns i unchanged.
        t1 = time.time()
        rets = list(
            self.pool.map(reentrant_test, self.different_dats,
                          [0, 7, 1, 5, 1, 7]))
        print(f'Time elapsed: {time.time()-t1:.2f}s, Returns = {rets}')
        self.assertEqual([3, 7, 3, 5, 3, 7], rets)

        # Same dat in parallel: the reentrant lock must not deadlock when a
        # thread re-enters the method recursively while others wait.
        t1 = time.time()
        rets = list(
            self.pool.map(reentrant_test, self.same_dats, [0, 7, 1, 5, 1, 7]))
        print(f'Time elapsed: {time.time()-t1:.2f}s, Returns = {rets}')
        self.assertEqual([3, 7, 3, 5, 3, 7], rets)
Esempio n. 2
0
class TestExpConfigGroupDatAttribute(TestCase):
    """Tests for the ExpConfig dat-attribute.

    A single DatHDF (dat 9111) is built once at class-definition time with
    minimum initialization, then wrapped in a Testing_ExpConfigDatAttribute
    as ``E``; every test below exercises ``E`` against that HDF file.
    """
    helpers.clear_outputs(output_dir)
    exp2hdf = helpers.get_testing_Exp2HDF(dat_dir=dat_dir,
                                          output_dir=output_dir)(9111)
    builder = DatHDFBuilder(exp2hdf, init_level='min')
    builder.create_hdf()
    builder.copy_exp_data()
    builder.init_DatHDF()
    builder.init_base_attrs()
    dat = builder.dat
    E = Testing_ExpConfigDatAttribute(dat, exp_config=ExpConfig(9111))

    def setUp(self):
        """No per-test setup needed; fixtures are built at class level."""
        pass

    def tearDown(self):
        """No per-test teardown needed."""
        pass

    def test__set_default_data_descriptors(self):
        """Default DataDescriptors end up as members of the HDF group."""
        self.E._set_default_data_descriptors()
        expected = {'cscurrent', 'cscurrent_2d', 'x_array', 'y_array'}
        with h5py.File(self.E.hdf.hdf_path, 'r') as f:
            descriptor_group = f.get(self.E.group_name + '/Default DataDescriptors')
            found = set(descriptor_group.keys())
        # Every expected key must be present (extra keys are allowed).
        self.assertTrue(expected - found == set())

    def test__initialize_minimum(self):
        """initialize_minimum flips the initialized flag."""
        self.E.initialize_minimum()
        self.assertTrue(self.E.initialized)

    def test__set_sweeplog_subs(self):
        """Sweeplog substitutions get stored as an attr on the HDF group."""
        self.E._set_sweeplog_subs()
        with h5py.File(self.E.hdf.hdf_path, 'r') as f:
            attr_group = f.get(self.E.group_name)
            stored = HDU.get_attr(attr_group, 'sweeplog_substitutions', None)
            self.assertEqual(stored, {'FastDAC 1': 'FastDAC'})

    def test_get_sweeplogs(self):
        """Sweeplogs for this dat report filenum 9111."""
        logs = self.E.get_sweeplogs()
        self.assertEqual(9111, logs['filenum'])

    def test_get_default_data_infos(self):
        """'cscurrent' maps to a DataInfo named 'i_sense'."""
        # Descriptors have to exist first; reuse the other test to write them.
        self.test__set_default_data_descriptors()
        infos = self.E.get_default_data_infos()
        self.assertEqual(infos['cscurrent'], DataInfo('i_sense'))

    def test_clear_caches(self):
        """clear_caches runs without raising."""
        self.E.clear_caches()
        self.assertTrue(True)
Esempio n. 3
0
import shutil
import time
from tests import helpers

dat_dir = os.path.abspath('fixtures/dats/2020Sep')
"""
Contents of dat_dir relevant in this file:
    Dat9111: Square entropy with dS ~Ln2

"""

# Where to put outputs (i.e. DatHDFs)
output_dir = os.path.abspath('Outputs/test_DatHDFBuilder')
# NOTE(review): looks like leftover debug output — confirm before removing
print(os.path.abspath('unit'))

# Exp2HDF subclass pointed at the fixture/output directories above
Testing_Exp2HDF = get_testing_Exp2HDF(dat_dir, output_dir)

# SetUp before tests: fresh output dir, then a builder for dat 9111 with
# minimum init level ('min'); tests drive the builder steps themselves.
helpers.clear_outputs(output_dir)
exp2hdf = Testing_Exp2HDF(9111, 'base')
builder = DatHDFBuilder(exp2hdf, 'min')
hdf_folder_path = os.path.join(output_dir, 'Dat_HDFs')
dat_hdf_path = os.path.join(
    hdf_folder_path, 'dat9111.h5')  # if datname=='base' it's not in filepath


class TestDatHDFBuilder(TestCase):
    def _del_hdf_contents(self):
        if os.path.exists(hdf_folder_path):
            for root, dirs, files in os.walk(hdf_folder_path):
                for f in files:
Esempio n. 4
0
from dat_analysis.data_standardize.exp_specific.Feb21 import Feb21Exp2HDF
import concurrent.futures
import os
import h5py
import numpy as np
import shutil
import time
from tests import helpers

# Location of the fixture dats used by the multithread-access tests
dat_dir = os.path.abspath('fixtures/dats/2021Feb')

# Where to put outputs (i.e. DatHDFs)
output_dir = os.path.abspath('Outputs/test_multithread_access')
hdf_folder_path = os.path.join(output_dir, 'Dat_HDFs')

# Exp2HDF subclass pointed at the dirs above, with Feb21 behaviour
Testing_Exp2HDF = get_testing_Exp2HDF(dat_dir, output_dir, base_class=Feb21Exp2HDF)


def read(datnum: int):
    """Fetch dat ``datnum`` through the shared handler and run its threaded
    read test.

    Fix: the parameter was annotated ``DatHDF`` but it is a dat *number*
    passed straight to ``get_dat`` (callers elsewhere in this file supply
    ints such as 717, 719, ...).

    Args:
        datnum: Number of the dat to load via ``get_dat``.

    Returns:
        Whatever ``dat._threaded_read_test()`` returns.
    """
    dat = get_dat(datnum, exp2hdf=Testing_Exp2HDF)
    return dat._threaded_read_test()

def write(datnum: int, value):
    """Fetch dat ``datnum`` through the shared handler and run its threaded
    write test with ``value``.

    Fix: the parameter was annotated ``DatHDF`` but it is a dat *number*
    passed straight to ``get_dat`` (same fix as ``read`` above).

    Args:
        datnum: Number of the dat to load via ``get_dat``.
        value: Value forwarded to ``_threaded_write_test``.

    Returns:
        Whatever ``dat._threaded_write_test(value)`` returns.
    """
    dat = get_dat(datnum, exp2hdf=Testing_Exp2HDF)
    return dat._threaded_write_test(value)


def mutithread_read(datnums):
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(datnums) + 3) as executor: