Example No. 1
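All snippets below use numpy together with plottr's DataDict container and its HDF5 storage helpers under short aliases. The import block below is a sketch of what they likely assume (the examples themselves do not show it); the exact module paths are an assumption, and helpers such as FN, FILEPATH, _clean_from_file, and _Writer are fixtures defined in the surrounding test modules that are not reproduced here.

import time
from shutil import rmtree

import numpy as np

# plottr's in-memory DataDict container (dd) and its HDF5 storage utilities (dds)
from plottr.data import datadict as dd
from plottr.data import datadict_storage as dds

# only needed for the DDH5Loader flowchart example (Example No. 5); assumed path
from plottr.node.tools import linearFlowchart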
def test_appending():
    x = np.arange(3)
    y = np.repeat(np.linspace(0, 1, 5).reshape(1, -1), 3, 0)
    z = np.arange(y.size).reshape(y.shape)

    data = dd.DataDict(
        x=dict(values=x,
               unit='A',
               __info__='careful!',
               __moreinfo__='more words in here'),
        y=dict(values=y, unit='B'),
        z=dict(values=z, axes=['x', 'y'], unit='C'),
        __desc__='some description',
    )
    assert data.validate()

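    # AppendMode.none: write the dataset as-is (no appending); the file should
    # round-trip to the original datadict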
    dds.datadict_to_hdf5(data, FN, append_mode=dds.AppendMode.none)
    assert _clean_from_file(dds.datadict_from_hdf5(FN)) == data

    data.add_data(x=[4],
                  y=np.linspace(0, 1, 5).reshape(1, -1),
                  z=np.arange(5).reshape(1, -1))
    assert data.validate()

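    # AppendMode.new: only the rows added since the last write are appended,
    # so the file still matches `data` exactly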
    dds.datadict_to_hdf5(data, FN, append_mode=dds.AppendMode.new)
    assert _clean_from_file(dds.datadict_from_hdf5(FN)) == data

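    # AppendMode.all: everything currently in `data` is appended again, so the
    # file now holds the dataset twice (data + data)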
    dds.datadict_to_hdf5(data, FN, append_mode=dds.AppendMode.all)
    ret = _clean_from_file(dds.datadict_from_hdf5(FN))
    assert ret == (data + data)
Example No. 2
    def savetrace(self, avgnum=3, savedir=None, name=None):
        if savedir is None:
            savedir = easygui.diropenbox("Choose file location: ")
            assert savedir is not None
        elif savedir == "previous":
            savedir = self.previous_save
            assert savedir is not None
        if name is None:
            name = easygui.enterbox("Enter Trace Name: ")
            assert name is not None

        data = dd.DataDict(
            frequency=dict(unit='Hz'),
            power=dict(axes=['frequency'], unit='dB'),
            phase=dict(axes=['frequency'], unit='Degrees'),
        )

        prev_trform = self.trform()

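        # DDH5Writer creates a new HDF5 data file under savedir and writes the
        # data added inside the context; the file is closed on exit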
        with dds.DDH5Writer(savedir, data, name=name) as writer:
            freqs = self.getSweepData()  # 1xN array, N in [1601, 1000]
            vnadata = np.array(
                self.average(avgnum))  # 2xN array, N in [1601, 1000]
            writer.add_data(frequency=freqs,
                            power=vnadata[0],
                            phase=vnadata[1])

        self.trform(prev_trform)
        self.previous_save = savedir
Example No. 3
def test_basic_storage_and_retrieval():
    x = np.arange(3)
    y = np.repeat(np.linspace(0, 1, 5).reshape(1, -1), 3, 0)
    z = np.arange(y.size).reshape(y.shape)

    data = dd.DataDict(
        x=dict(values=x,
               unit='A',
               __info__='careful!',
               __moreinfo__='more words in here'),
        y=dict(values=y, unit='B'),
        z=dict(values=z, axes=['x', 'y'], unit='C'),
        __desc__='some description',
    )
    assert data.validate()

    dds.datadict_to_hdf5(data, str(FILEPATH), append_mode=dds.AppendMode.none)
    datafromfile = dds.datadict_from_hdf5(str(FILEPATH))

    # saving to hdf5 adds a few extra meta entries that we need to ignore
    # when comparing
    datafromfile = _clean_from_file(datafromfile)
    assert (data == datafromfile)

    FILEPATH.unlink()
Example No. 4
    def savetrace(self, avgnum=1, savedir=None, name='CXA_trace'):
        if savedir is None:
            import easygui
            savedir = easygui.diropenbox(
                "Choose place to save trace information: ")
            assert savedir is not None
        SA_data = self.get_data(count=avgnum)
        data = dd.DataDict(
            frequency=dict(unit='Hz'),
            power=dict(axes=['frequency'], unit='dBm'),
        )
        with dds.DDH5Writer(savedir, data, name=name) as writer:
            writer.add_data(frequency=SA_data[:, 0], power=SA_data[:, 1])
Example No. 5
def test_loader_node(qtbot):
    dds.DDH5Loader.useUi = False

    x = np.arange(3)
    y = np.repeat(np.linspace(0, 1, 5).reshape(1, -1), 3, 0)
    z = np.arange(y.size).reshape(y.shape)

    data = dd.DataDict(
        x=dict(values=x,
               unit='A',
               __info__='careful!',
               __moreinfo__='more words in here'),
        y=dict(values=y, unit='B'),
        z=dict(values=z, axes=['x', 'y'], unit='C'),
        __desc__='some description',
    )
    assert data.validate()
    dds.datadict_to_hdf5(data, str(FILEPATH), append_mode=dds.AppendMode.new)
    assert _clean_from_file(dds.datadict_from_hdf5(str(FILEPATH))) == data

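    # build a flowchart containing only the DDH5Loader node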
    fc = linearFlowchart(('loader', dds.DDH5Loader))
    node = fc.nodes()['loader']

    assert fc.outputValues()['dataOut'] is None

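    # assigning the filepath triggers loading; wait for the worker's
    # dataLoaded signal before inspecting the output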
    with qtbot.waitSignal(node.loadingWorker.dataLoaded,
                          timeout=1000) as blocker:
        node.filepath = str(FILEPATH)
    out = fc.outputValues()['dataOut'].copy()
    out.pop('__title__')
    assert _clean_from_file(out) == data

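    # extend the dataset on disk; the node's output stays stale until
    # update() is called below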
    data.add_data(x=[3],
                  y=np.linspace(0, 1, 5).reshape(1, -1),
                  z=np.arange(5).reshape(1, -1))
    dds.datadict_to_hdf5(data, str(FILEPATH), append_mode=dds.AppendMode.new)
    assert _clean_from_file(dds.datadict_from_hdf5(str(FILEPATH))) == data

    out = fc.outputValues()['dataOut'].copy()
    out.pop('__title__')
    assert not _clean_from_file(out) == data

    with qtbot.waitSignal(node.loadingWorker.dataLoaded,
                          timeout=1000) as blocker:
        node.update()
    out = fc.outputValues()['dataOut'].copy()
    out.pop('__title__')
    assert _clean_from_file(out) == data

    FILEPATH.unlink()
Example No. 6
def test_writer_with_large_data():
    writer = _Writer()

    ref_data = writer.mkdata()
    ref_dataset = dd.DataDict(
        x=dict(values=ref_data, unit='W'),
        y=dict(values=ref_data**2, unit='T', axes=['x']),
    )
    ref_dataset['__dataset.name__'] = ''

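    # run the writer thread to completion before reading the file back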
    writer.start()
    writer.join()

    dataset_from_file = dds.datadict_from_hdf5(writer.filepath)
    assert (_clean_from_file(dataset_from_file) == ref_dataset)

    rmtree('./TESTDATA')
Example No. 7
def test_concurrent_write_and_read():
    writer = _Writer()

    ref_data = writer.mkdata()
    ref_dataset = dd.DataDict(
        x=dict(values=ref_data, unit='W'),
        y=dict(values=ref_data**2, unit='T', axes=['x']),
    )
    ref_dataset['__dataset.name__'] = ''

    writer.start()
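    # while the writer is still running, the file can be read concurrently;
    # loading just the structure should succeed on every attempt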
    while writer.is_alive():
        time.sleep(2)
        data_from_file = dds.datadict_from_hdf5(writer.filepath,
                                                structure_only=True)
        assert (data_from_file.structure(include_meta=False))

    dataset_from_file = dds.datadict_from_hdf5(writer.filepath)
    assert (_clean_from_file(dataset_from_file) == ref_dataset)

    rmtree('./TESTDATA')