Example #1
def three_incompatible_3d_sets(nx: int = 3,
                               ny: int = 3,
                               nz: int = 3,
                               rand_factor: int = 1) -> DataDict:
    x = np.linspace(0, 10, nx)
    y = np.linspace(-5, 5, ny)
    z = np.arange(nz)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    dd = np.cos(xx) * np.sin(yy) + rand_factor * np.random.rand(*zz.shape)
    dd2 = np.sin(xx) * np.cos(yy) + rand_factor * np.random.rand(*zz.shape)
    dd3 = np.cos(xx)**2 * np.exp(
        -yy**2 * 0.2) + rand_factor * np.random.rand(*zz.shape)

    d = DataDict(
        x=dict(values=xx.reshape(-1), unit='mA'),
        y=dict(values=yy.reshape(-1), unit='uC'),
        z=dict(values=zz.reshape(-1), unit='nF'),
        data=dict(values=dd.reshape(-1), axes=['x', 'y', 'z'], unit='kW'),
        more_data=dict(values=dd2.reshape(-1), axes=['y', 'x', 'z'],
                       unit='MV'),
        different_data=dict(values=dd3.reshape(-1),
                            axes=['z', 'y', 'x'],
                            unit='TS'),
    )
    d.validate()
    return d
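A minimal usage sketch (hedged; it only uses methods that already appear in these examples): the three dependents share the same axes but list them in different orders, which is what makes them mutually incompatible for gridding as a single block.

# Sketch: inspect the axis order of each dependent (imports as in the tests above).
d = three_incompatible_3d_sets(nx=3, ny=3, nz=3)
for name in ['data', 'more_data', 'different_data']:
    print(name, d[name]['axes'])   # same axes, permuted order
print(d.shapes())                  # all fields are flat, length nx * ny * nz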
Example #2
def test_load_2dsoftsweep_known_shape(experiment):
    N = 1
    m = qc.Measurement(exp=experiment)
    m.register_custom_parameter('x', unit='cm')
    m.register_custom_parameter('y')

    # check that unused parameters don't mess with the data
    m.register_custom_parameter('foo')
    dd_expected = DataDict(x=dict(values=np.array([]), unit='cm'),
                           y=dict(values=np.array([])))
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()

    shape = (3, 3)

    m.set_shapes({'z_0': shape})

    with m.run() as datasaver:
        for result in testdata.generate_2d_scalar_simple(*shape, N):
            row = [(k, v) for k, v in result.items()] + [('foo', 1)]
            datasaver.add_result(*row)
            dd_expected.add_data(**result)

    dd_expected['x']['values'] = dd_expected['x']['values'].reshape(*shape)
    dd_expected['y']['values'] = dd_expected['y']['values'].reshape(*shape)
    dd_expected['z_0']['values'] = dd_expected['z_0']['values'].reshape(*shape)

    # retrieve data as data dict
    ddict = ds_to_datadict(datasaver.dataset)
    assert ddict == dd_expected
Example #3
def test_load_2dsoftsweep():
    qc.config.core.db_location = DBPATH
    initialise_database()
    exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')

    N = 5
    m = qc.Measurement(exp=exp)
    m.register_custom_parameter('x')
    m.register_custom_parameter('y')

    # check that unused parameters don't mess with the data
    m.register_custom_parameter('foo')
    dd_expected = DataDict(x=dict(values=np.array([])),
                           y=dict(values=np.array([])))
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()

    with m.run() as datasaver:
        for result in testdata.generate_2d_scalar_simple(3, 3, N):
            row = [(k, v) for k, v in result.items()] + [('foo', 1)]
            datasaver.add_result(*row)
            dd_expected.add_data(**result)

    # retrieve data as data dict
    run_id = datasaver.dataset.captured_run_id
    ddict = datadict_from_path_and_run_id(DBPATH, run_id)
    assert ddict == dd_expected
Example #4
def two_1d_traces(nvals=11):
    x = np.linspace(0, 10, nvals)
    y = np.cos(x)
    z = np.cos(x)**2
    d = DataDict(
        x={'values': x},
        y={'values': y, 'axes': ['x']},
        z={'values': z, 'axes': ['x']},
    )
    d.validate()
    return d
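As a hedged follow-up sketch, a single trace can be pulled out of such a multi-trace DataDict; `extract` and the `num.arrays_equal` helper are used the same way in the tests further down.

# Sketch: extract one dependent together with its axis (assumes the tests' imports).
d = two_1d_traces(nvals=11)
d_y = d.extract('y')                 # keeps 'y' and its axis 'x' only
assert 'z' not in d_y
assert num.arrays_equal(d_y.data_vals('x'), d.data_vals('x'))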
Example #5
def test_meta():
    """Test accessing meta information."""

    dd = DataDict(
        x=dict(
            values=[1, 2, 3],
            __meta1__='abc',
            __meta2__='def',
        ),
        y=dict(
            values=[1, 2, 3],
            axes=['x'],
            __meta3__='123',
            __meta4__=None,
        ),
        __info__=lambda x: 0,
        __more_info__=object,
    )
    dd['__1234!__'] = '```'
    dd.add_meta('###', 3e-12)
    dd.add_meta('@^&', 0, data='x')

    assert dd.validate()

    global_meta = {k: v for k, v in dd.meta_items()}
    for k in ['info', 'more_info', '1234!', '###']:
        assert f'__{k}__' in dd
        assert k in global_meta

    assert dd.meta_val('more_info') == object
    assert dd.meta_val('info')(1) == 0
    assert dd.meta_val('@^&', 'x') == 0

    for k in ['meta1', 'meta2', '@^&']:
        assert dd.meta_val(k, data='x') == dd['x'][f'__{k}__']
        assert f'__{k}__' in dd['x']
        assert k in [n for n, _ in dd.meta_items('x')]

    # test stripping of meta information
    dd.clear_meta()
    assert dd.validate()

    nmeta = 0
    for k, _ in dd.items():
        if k[:2] == '__' and k[-2:] == '__':
            nmeta += 1
    assert nmeta == 0

    for d, _ in dd.data_items():
        nmeta = 0
        for k, _ in dd[d].items():
            if k[:2] == '__' and k[-2:] == '__':
                nmeta += 1
        assert nmeta == 0
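A condensed sketch of the meta convention exercised above: meta entries live under double-underscore keys, either at the top level or inside a data field, and `add_meta` / `meta_val` / `clear_meta` hide that wrapping. The key names below are made up for illustration.

# Sketch of the meta API as used in test_meta (hypothetical key names).
dd = DataDict(x=dict(values=[1, 2, 3]))
dd.add_meta('created_by', 'sweep-script')     # stored as dd['__created_by__']
dd.add_meta('calibrated', True, data='x')     # stored as dd['x']['__calibrated__']
assert dd.meta_val('created_by') == 'sweep-script'
assert dd.meta_val('calibrated', data='x') is True
dd.clear_meta()                               # strips all __...__ entries again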
Example #6
def test_validation():
    """Test if validation is working."""

    with pytest.raises(ValueError):
        dd = DataDict(y=dict(values=[0], axes=['x']))
        dd.validate()

    dd = DataDict(
        x=dict(values=[0]),
        y=dict(values=[0], axes=['x']),
    )
    assert dd.validate()
Example #7
def one_2d_set(nx=10, ny=10):
    x = np.linspace(0, 10, nx)
    y = np.arange(ny)

    xx, yy = np.meshgrid(x, y, indexing='ij')
    dd = np.cos(xx) + (-0.05 + 0.1 * np.random.rand(*yy.shape))

    d = DataDict(
        x=dict(values=xx.reshape(-1)),
        y=dict(values=yy.reshape(-1)),
        cos_data=dict(values=dd.reshape(-1), axes=['x', 'y']),
    )
    d.validate()
    return d
Example #8
def test_validation_fail():
    """Test if invalid data fails the validation,"""

    dd = DataDict(
        x=dict(values=[1, 2]),
        y=dict(values=[1, 2], axes=['x']),
    )
    assert dd.validate()

    dd = DataDict(
        x=dict(values=[1, 2, 3]),
        y=dict(values=[1, 2], axes=['x']),
    )
    with pytest.raises(ValueError):
        dd.validate()
Example #9
def two_compatible_noisy_2d_sets(nx: int = 10, ny: int = 10) -> DataDict:
    x = np.linspace(0, 10, nx)
    y = np.arange(ny)

    xx, yy = np.meshgrid(x, y, indexing='ij')
    dd = np.cos(xx) + (-0.05 + 0.1 * np.random.rand(*yy.shape))
    dd2 = np.sin(xx) + (-0.5 + 1 * np.random.rand(*yy.shape))

    d = DataDict(
        x=dict(values=xx.reshape(-1)),
        y=dict(values=yy.reshape(-1)),
        cos_data=dict(values=dd.reshape(-1), axes=['x', 'y']),
        sin_data=dict(values=dd2.reshape(-1), axes=['x', 'y']),
    )
    d.validate()
    return d
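The meshgrid-then-flatten pattern used in this and most other fixtures produces data that lies on a regular grid. A hedged sketch of converting it back, assuming `datadict_to_meshgrid` (used in the equality test below) can infer the grid from the flattened product sweep:

# Sketch: recover the grid from the flattened data (shape inference assumed).
d = two_compatible_noisy_2d_sets(nx=10, ny=10)
mg = datadict_to_meshgrid(d)
assert mg.data_vals('cos_data').shape == (10, 10)
assert mg.data_vals('sin_data').shape == (10, 10)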
Example #10
def test_expansion_simple():
    """Test whether simple expansion of nested parameters works."""

    a = np.arange(3)
    x = np.arange(3)
    y = np.arange(7, 10)

    aaa, xxx, yyy = np.meshgrid(a, x, y, indexing='ij')
    zzz = aaa + xxx * yyy

    dd = DataDict(
        a=dict(values=a),
        x=dict(values=xxx),
        y=dict(values=yyy),
        z=dict(values=zzz),
    )

    assert dd.validate()
    assert dd.nrecords() == 3
    assert dd._inner_shapes() == dict(a=tuple(), x=(3, 3), y=(3, 3), z=(3, 3))
    assert dd.is_expandable()
    assert not dd.is_expanded()

    dd2 = dd.expand()
    assert dd2.is_expanded()
    assert dd2.nrecords() == aaa.size
    assert np.all(np.isclose(dd2.data_vals('a'), aaa.reshape(-1)))
    assert np.all(np.isclose(dd2.data_vals('x'), xxx.reshape(-1)))
    assert np.all(np.isclose(dd2.data_vals('z'), zzz.reshape(-1)))
    assert set(dd2.shapes().values()) == {(aaa.size, )}
Example #11
def test_add_data():
    """Testing simple adding of data"""

    # make base data
    dd = DataDict(
        x=dict(values=[1, 2, 3]),
        y=dict(values=np.arange(6).reshape(3, 2), axes=['x']),
    )
    assert dd.validate()

    # test bad data insertion
    with pytest.raises(ValueError):
        dd.add_data(x=[4])
    assert num.arrays_equal(
        dd.data_vals('x'),
        np.array([1, 2, 3]),
    )

    # this should work!
    dd.add_data(x=[4], y=[[6, 7]])
    assert num.arrays_equal(dd.data_vals('x'), np.array([1, 2, 3, 4]))
    assert num.arrays_equal(dd.data_vals('y'), np.arange(8).reshape(4, 2))
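The same `add_data` call is what drives the streaming qcodes tests above; a minimal accumulation sketch with made-up sweep values:

# Sketch: build up a DataDict record by record (imports as in the tests above).
dd = DataDict(
    x=dict(values=np.array([])),
    y=dict(values=np.array([]), axes=['x']),
)
for xv in [0.0, 1.0, 2.0]:
    dd.add_data(x=[xv], y=[np.cos(xv)])   # one record per call
assert dd.validate()
assert dd.nrecords() == 3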
Example #12
def test_sanitizing_2d():
    """Test if dataset cleanup gives expected results."""

    a = np.arange(2 * 4).astype(object).reshape(4, 2)
    a[1, :] = None
    a[3, :] = None
    a[2, -1] = None

    b = np.arange(2 * 4).astype(float).reshape(4, 2)
    b[1, :] = np.nan
    b[0, 0] = np.nan
    b[3, 0] = np.nan

    a_clean = np.vstack((a[0:1, :], a[2:, :]))
    b_clean = np.vstack((b[0:1, :], b[2:, :]))

    dd = DataDict(
        a=dict(values=a),
        b=dict(values=b, axes=['a']),
    )

    assert dd.validate()
    dd2 = dd.remove_invalid_entries()
    assert dd2.validate()
    assert dd2.shapes() == {'a': (3, 2), 'b': (3, 2)}
    assert num.arrays_equal(dd2.data_vals('a'), a_clean)
    assert num.arrays_equal(dd2.data_vals('b'), b_clean)
Example #13
def get_1d_scalar_cos_data(nx: int = 10, ndata: int = 1) -> DataDict:
    """
    return a datadict with `ndata` dependents.
    All have a cos-dependence on x (with increasing frequency).
    Also noise is added on top.
    """
    x = np.linspace(0, 10, nx)
    d = DataDict(x=dict(values=x, unit='A'))
    for n in range(ndata):
        dd = np.cos((n + 1) * x) + (-0.1 + 0.2 * np.random.rand(x.size))
        d[f"data_{n+1}"] = dict(values=dd, axes=[
            'x',
        ], unit='a.u.')

    d.validate()
    return d
Example #14
def test_set_grid_with_order(qtbot):
    """Test making meshgrid when the internal axis order needs to be fixed."""

    DataGridder.useUi = False
    DataGridder.uiClass = None

    fc = linearFlowchart(('grid', DataGridder))
    node = fc.nodes()['grid']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vv = xx * yy * zz
    x1d, y1d, z1d = xx.flatten(), yy.flatten(), zz.flatten()
    v1d = vv.flatten()

    # construct data dict, with axes for vals not conforming to the
    # correct order with which we've generated the data
    data = DataDict(x=dict(values=x1d),
                    y=dict(values=y1d),
                    z=dict(values=z1d),
                    vals=dict(values=v1d, axes=['y', 'z', 'x']))
    assert data.validate()

    # in the 1-d data, nothing unusual should happen
    fc.setInput(dataIn=data)
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        v1d,
    )

    # guessing the grid should work, and fix the wrong order
    node.grid = GridOption.guessShape, dict()
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv.transpose((1, 2, 0)),
    )
    assert fc.outputValues()['dataOut']['vals']['axes'] == ['y', 'z', 'x']

    # finally, specify the shape manually. omitting the inner axis order
    # does not give the correctly ordered grid
    node.grid = GridOption.specifyShape, dict(shape=(5, 2, 5))
    assert fc.outputValues()['dataOut'].data_vals('vals').shape == (5, 2, 5)
    assert not num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv.transpose((1, 2, 0)),
    )

    # but using the right inner axis order should do it
    node.grid = GridOption.specifyShape, dict(order=['x', 'y', 'z'],
                                              shape=(5, 5, 2))
    assert fc.outputValues()['dataOut'].data_vals('vals').shape == (5, 2, 5)
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv.transpose((1, 2, 0)),
    )
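A stripped-down sketch of the same gridding pipeline on a plain 2D sweep, using only names that appear in these tests (it assumes the same Qt test harness the other node tests run in); guessing the shape is enough when the flattened data is in sweep order:

# Sketch: minimal DataGridder pipeline, 2D case, shape guessed automatically.
DataGridder.useUi = False
DataGridder.uiClass = None
fc = linearFlowchart(('grid', DataGridder))
node = fc.nodes()['grid']

x = np.arange(4.0)
y = np.linspace(0, 1, 3)
xx, yy = np.meshgrid(x, y, indexing='ij')
data = DataDict(
    x=dict(values=xx.flatten()),
    y=dict(values=yy.flatten()),
    vals=dict(values=(xx * yy).flatten(), axes=['x', 'y']),
)
assert data.validate()

fc.setInput(dataIn=data)
node.grid = GridOption.guessShape, dict()
assert fc.outputValues()['dataOut'].data_vals('vals').shape == (4, 3)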
Example #15
def test_update_qcloader(qtbot, empty_db_path):
    db_path = empty_db_path

    exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')

    N = 2
    m = qc.Measurement(exp=exp)
    m.register_custom_parameter('x')
    m.register_custom_parameter('y')
    dd_expected = DataDict(x=dict(values=np.array([])),
                           y=dict(values=np.array([])))
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()

    # setting up the flowchart
    fc = linearFlowchart(('loader', QCodesDSLoader))
    loader = fc.nodes()['loader']

    def check():
        nresults = ds.number_of_results
        loader.update()
        ddict = fc.output()['dataOut']

        if ddict is not None and nresults > 0:
            z_in = dd_expected.data_vals('z_1')
            z_out = ddict.data_vals('z_1')
            if z_out is not None:
                assert z_in.size == z_out.size
                assert np.allclose(z_in, z_out, atol=1e-15)

    with m.run() as datasaver:
        ds = datasaver.dataset
        run_id = datasaver.dataset.captured_run_id
        loader.pathAndId = db_path, run_id

        for result in testdata.generate_2d_scalar_simple(3, 3, N):
            row = [(k, v) for k, v in result.items()]
            datasaver.add_result(*row)
            dd_expected.add_data(**result)
            check()
        check()
Example #16
def get_2d_scalar_cos_data(nx=10, ny=10, ndata=1):
    """
    return a datadict with `ndata` dependents.
    All have a cos-dependence on x (with increasing frequency),
    and repetitions along y.
    Also noise is added on top.
    """
    x = np.linspace(0, 10, nx)
    y = np.arange(ny)
    xx, yy = np.meshgrid(x, y, indexing='ij')

    d = DataDict(
        x=dict(values=xx.reshape(-1), unit='A'),
        y=dict(values=yy.reshape(-1), unit='B'),
    )
    for n in range(ndata):
        dd = np.cos((n + 1) * xx) + (-0.1 + 0.2 * np.random.rand(*yy.shape))
        d[f"data_{n+1}"] = dict(values=dd.reshape(-1), axes=['x', 'y'])

    d.validate()
    return d
Example #17
def test_expansion_fail():
    """Test whether expansion fails correctly"""

    dd = DataDict(a=dict(values=np.arange(4).reshape(2, 2)),
                  b=dict(values=np.arange(4).reshape(2, 2), axes=['a']),
                  x=dict(values=np.arange(6).reshape(2, 3), ),
                  y=dict(values=np.arange(6).reshape(2, 3), axes=['x']))

    assert dd.validate()
    assert not dd.is_expandable()
    with pytest.raises(ValueError):
        dd.expand()
Example #18
def test_sanitizing_1d():
    """Test if dataset cleanup gives expected results."""
    a = np.arange(10).astype(object)
    a[4:6] = None
    b = np.arange(10).astype(complex)
    b[4] = np.nan

    a_clean = np.hstack((a[:4], a[5:]))
    b_clean = np.hstack((b[:4], b[5:]))

    dd = DataDict(
        a=dict(values=a),
        b=dict(values=b, axes=['a']),
    )

    assert dd.validate()
    dd2 = dd.remove_invalid_entries()
    assert dd2.validate()
    assert num.arrays_equal(dd2.data_vals('a'), a_clean)
    assert num.arrays_equal(dd2.data_vals('b'), b_clean)
Example #19
def test_basic_flowchart_and_nodes(qtbot):
    fc = flowchart()
    node = Node(name='node')

    fc.addNode(node, name=node.name())

    fc.connectTerminals(fc['dataIn'], node['dataIn'])
    fc.connectTerminals(node['dataOut'], fc['dataOut'])

    data = DataDict(data=dict(values=[1, 2, 3]))
    assert data.validate()

    fc.setInput(dataIn=data)
    assert fc.outputValues() == dict(dataOut=data)

    for i in range(3):
        lst = [(f'node{j}', Node) for j in range(i)]
        fc = linearFlowchart(*lst)
        fc.setInput(dataIn=data)
        assert fc.outputValues() == dict(dataOut=data)
Example #20
def test_shapes():
    """Test correct retrieval of shapes, incl nested shapes."""

    dd = DataDict(
        x=dict(values=[1, 2, 3], ),
        y=dict(
            values=[1, 2, 3],
            axes=['x'],
        ),
        z=dict(
            values=[[0, 0], [1, 1], [2, 2]],
            axes=['x'],
        ),
    )

    assert dd.validate()

    shapes = dd.shapes()
    assert shapes['x'] == (3, )
    assert shapes['y'] == (3, )
    assert shapes['z'] == (3, 2)
Example #21
def test_nontrivial_expansion():
    """test expansion when different dependents require different
    expansion of an axis."""

    a = np.arange(4)
    b = np.arange(4 * 2).reshape(4, 2)
    x = np.arange(4)
    y = np.arange(4 * 2).reshape(4, 2)

    dd = DataDict(a=dict(values=a),
                  b=dict(values=b),
                  x=dict(values=x, axes=['a']),
                  y=dict(values=y, axes=['a', 'b']))

    assert dd.validate()
    assert dd.is_expandable()

    dd_x = dd.extract('x').expand()
    assert num.arrays_equal(a, dd_x.data_vals('a'))

    dd_y = dd.extract('y').expand()
    assert num.arrays_equal(a.repeat(2), dd_y.data_vals('a'))
Example #22
def test_basic_gridding(qtbot):
    """Test simple gridding tasks"""

    DataGridder.useUi = False
    DataGridder.uiClass = None

    fc = linearFlowchart(('grid', DataGridder))
    node = fc.nodes()['grid']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vv = xx * yy * zz
    x1d, y1d, z1d = xx.flatten(), yy.flatten(), zz.flatten()
    v1d = vv.flatten()
    data = DataDict(x=dict(values=x1d),
                    y=dict(values=y1d),
                    z=dict(values=z1d),
                    vals=dict(values=v1d, axes=['x', 'y', 'z']))
    assert data.validate()

    fc.setInput(dataIn=data)
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        v1d,
    )

    node.grid = GridOption.guessShape, dict()
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv,
    )

    node.grid = GridOption.specifyShape, dict(shape=(5, 5, 2))
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv,
    )
Example #23
def test_equality():
    """test whether direct comparison of datasets is working."""
    dd1 = DataDict(
        x=dict(values=np.arange(5), unit='A'),
        y=dict(values=np.arange(5)**2, axes=['x']),
    )
    assert dd1.validate()
    dd1.add_meta('some_info', 'some_value')
    dd2 = dd1.copy()
    assert datasets_are_equal(dd1, dd2)
    assert dd1 == dd2

    dd2 = dd1.copy()
    dd2.delete_meta('some_info')
    assert not datasets_are_equal(dd1, dd2)
    assert not dd1 == dd2
    assert datasets_are_equal(dd1, dd2, ignore_meta=True)

    dd2 = dd1.copy()
    dd2['x']['unit'] = 'B'
    assert not datasets_are_equal(dd1, dd2)

    dd2 = dd1.copy()
    dd2['y']['values'][-1] -= 1
    assert not datasets_are_equal(dd1, dd2)

    dd2 = DataDictBase(**dd1)
    assert not datasets_are_equal(dd1, dd2)

    dd2 = datadict_to_meshgrid(dd1)
    assert not datasets_are_equal(dd1, dd2)

    dd2 = dd1.copy()
    dd2['w'] = dict(values=np.arange(5), unit='C')
    dd2['y']['axes'] = ['w', 'x']
    assert not datasets_are_equal(dd1, dd2)

    assert not dd1 == 'abc'
Example #24
def test_basic_scale_units(qtbot):

    ScaleUnits.useUi = False
    ScaleUnits.uiClass = None

    fc = linearFlowchart(('scale_units', ScaleUnits))
    node = fc.nodes()['scale_units']

    x = np.arange(0, 5.0e-9, 1.0e-9)
    y = np.linspace(0, 1e9, 5)
    z = np.arange(4.0e6, 6.0e6, 1.0e6)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vv = xx * yy * zz
    x1d, y1d, z1d = xx.flatten(), yy.flatten(), zz.flatten()
    v1d = vv.flatten()
    data = DataDict(x=dict(values=x1d, unit='V'),
                    y=dict(values=y1d, unit="A"),
                    z=dict(values=z1d, unit="Foobar"),
                    vals=dict(values=v1d, axes=['x', 'y', 'z']))
    assert data.validate()

    fc.setInput(dataIn=data)

    output = fc.outputValues()['dataOut']

    assert output['x']['unit'] == 'nV'
    assert_allclose(output['x']["values"], (xx * 1e9).ravel())

    assert output['y']['unit'] == 'GA'
    assert_allclose(output['y']["values"], (yy / 1e9).ravel())

    assert output['z']["unit"] == '$10^{6}$ Foobar'
    assert_allclose(output['z']["values"], (zz / 1e6).ravel())

    assert output['vals']['unit'] == ''
    assert_allclose(output['vals']['values'], vv.flatten())
Example #25
def test_combine_ddicts():
    """test the datadict combination function"""

    # first case: two ddicts with different independents and shared axes.
    # should work. probably the most common use case.
    dd1 = DataDict(x=dict(values=np.array([1, 2, 3]), ),
                   y=dict(values=np.array([1, 2, 3]), ),
                   z1=dict(
                       values=np.array([1, 2, 3]),
                       axes=['x', 'y'],
                   ))
    dd1.validate()

    dd2 = DataDict(x=dict(values=np.array([1, 2, 3]), ),
                   y=dict(values=np.array([1, 2, 3]), ),
                   z2=dict(
                       values=np.array([3, 2, 1]),
                       axes=['x', 'y'],
                   ))
    dd2.validate()

    combined_dd = combine_datadicts(dd1, dd2)
    expected_dd = DataDict(
        x=dict(values=np.array([1, 2, 3]), ),
        y=dict(values=np.array([1, 2, 3]), ),
        z1=dict(
            values=np.array([1, 2, 3]),
            axes=['x', 'y'],
        ),
        z2=dict(
            values=np.array([3, 2, 1]),
            axes=['x', 'y'],
        ),
    )
    expected_dd.validate()
    assert combined_dd == expected_dd

    # second case: two ddicts with a conflict in an axis
    dd1 = DataDict(x=dict(values=np.array([1, 2, 3]), ),
                   y=dict(values=np.array([1, 2, 3]), ),
                   z1=dict(
                       values=np.array([1, 2, 3]),
                       axes=['x', 'y'],
                   ))
    dd1.validate()

    dd2 = DataDict(x=dict(values=np.array([1, 2, 4]), ),
                   y=dict(values=np.array([1, 2, 3]), ),
                   z2=dict(
                       values=np.array([3, 2, 1]),
                       axes=['x', 'y'],
                   ))
    dd2.validate()

    combined_dd = combine_datadicts(dd1, dd2)
    expected_dd = DataDict(x=dict(values=np.array([1, 2, 3]), ),
                           y=dict(values=np.array([1, 2, 3]), ),
                           z1=dict(
                               values=np.array([1, 2, 3]),
                               axes=['x', 'y'],
                           ),
                           x_0=dict(values=np.array([1, 2, 4]), ),
                           z2=dict(
                               values=np.array([3, 2, 1]),
                               axes=['x_0', 'y'],
                           ))
    expected_dd.validate()
    assert combined_dd == expected_dd

    # third case: conflict in a dependent only; it should be renamed
    x = np.array([1, 2, 3])
    y = np.array([1, 2, 3])
    z = np.arange(3)
    dd1 = DataDict(x=dict(values=x),
                   y=dict(values=y),
                   z=dict(values=z, axes=['x', 'y']))
    dd1.validate()
    dd2 = dd1.copy()
    dd2['z']['values'] = z[::-1]
    dd2.validate()

    combined_dd = combine_datadicts(dd1, dd2)
    expected_dd = DataDict(x=dict(values=x),
                           y=dict(values=y),
                           z=dict(values=z, axes=['x', 'y']),
                           z_0=dict(values=z[::-1], axes=['x', 'y']))
    assert combined_dd == expected_dd
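As a closing, hedged sketch: the dict returned by `combine_datadicts` is an ordinary DataDict, so the usual accessors apply to it. The structure mirrors the first case above; the z1/z2 values are made up.

# Sketch: the combined output behaves like any DataDict (assumes the tests' imports).
x = np.array([1, 2, 3])
y = np.array([1, 2, 3])
dd1 = DataDict(x=dict(values=x), y=dict(values=y),
               z1=dict(values=x * 2, axes=['x', 'y']))
dd2 = DataDict(x=dict(values=x), y=dict(values=y),
               z2=dict(values=x * 3, axes=['x', 'y']))
dd1.validate()
dd2.validate()

cdd = combine_datadicts(dd1, dd2)
assert cdd.validate()
assert num.arrays_equal(cdd.data_vals('z2'), x * 3)
assert cdd.extract('z1')['z1']['axes'] == ['x', 'y']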