Example #1
def test_add_data():
    """Testing simple adding of data"""

    # make base data
    dd = DataDict(
        x=dict(values=[1, 2, 3]),
        y=dict(values=np.arange(6).reshape(3, 2), axes=['x']),
    )
    assert dd.validate()

    # test bad data insertion: adding x without a matching y value must fail
    with pytest.raises(ValueError):
        dd.add_data(x=[4])
    assert num.arrays_equal(dd.data_vals('x'), np.array([1, 2, 3]))

    # this should work!
    dd.add_data(x=[4], y=[[6, 7]])
    assert num.arrays_equal(dd.data_vals('x'), np.array([1, 2, 3, 4]))
    assert num.arrays_equal(dd.data_vals('y'), np.arange(8).reshape(4, 2))
Example #2
def test_average_subtraction(qtbot):
    """Test the subtract average filter node"""

    SubtractAverage.useUi = False
    SubtractAverage.uiClass = None

    fc = linearFlowchart(('Subtract Average', SubtractAverage), )
    node = fc.nodes()['Subtract Average']

    x = np.arange(11) - 5.
    y = np.linspace(0, 10, 51)
    xx, yy = np.meshgrid(x, y, indexing='ij')
    zz = np.sin(yy) + xx
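    # note: np.sin(yy) does not depend on x, so its global mean equals its
    # mean along the y axis; that is what subtracting the average over 'y'
    # should leave behind.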
    zz_ref_avg_y = np.sin(yy) - np.sin(yy).mean()

    data = MeshgridDataDict(x=dict(values=xx),
                            y=dict(values=yy),
                            z=dict(values=zz, axes=['x', 'y']))
    assert data.validate()

    fc.setInput(dataIn=data)
    assert num.arrays_equal(zz, fc.outputValues()['dataOut'].data_vals('z'))

    node.averagingAxis = 'y'
    assert num.arrays_equal(
        zz_ref_avg_y,
        fc.outputValues()['dataOut'].data_vals('z'),
    )
Example #3
def test_array_equality():
    """Test if two arrays are correctly identified as having equal content"""

    # different dtype should not matter
    a = np.arange(2 * 4).astype(int).reshape(4, 2)
    b = np.arange(2 * 4).astype(np.complex128).reshape(4, 2)
    assert num.arrays_equal(a, b)

    # different representation of invalid data should not matter
    a = np.arange(2 * 4).astype(object).reshape(4, 2)
    a[2, 0] = None
    b = np.arange(2 * 4).astype(np.complex128).reshape(4, 2)
    b[2, 0] = np.nan
    assert num.arrays_equal(a, b)

    # invalid is not the same as valid
    a = np.arange(2 * 4).astype(object).reshape(4, 2)
    a[2, 0] = 0
    b = np.arange(2 * 4).astype(np.complex128).reshape(4, 2)
    b[2, 0] = np.nan
    assert not num.arrays_equal(a, b)

    a = np.array(['a', 1, None])
    b = np.array(['b', 1, np.nan])
    assert not num.arrays_equal(a, b)

    a = np.array(['a', 1, None])
    b = np.array(['a', 1.0, None])
    assert num.arrays_equal(a, b)
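
The assertions above rely on a comparison that treats NaN and None both as "invalid" and considers invalid entries equal to each other. A minimal sketch of that semantics (an illustrative stand-in written here for clarity, not the actual num.arrays_equal implementation):

import numpy as np

def arrays_equal_sketch(a: np.ndarray, b: np.ndarray) -> bool:
    """Illustrative only: invalid entries (None or NaN) compare equal to each
    other; all other entries must compare equal element-wise."""
    if a.shape != b.shape:
        return False
    for x, y in zip(a.reshape(-1), b.reshape(-1)):
        x_invalid = x is None or (isinstance(x, (float, complex)) and np.isnan(x))
        y_invalid = y is None or (isinstance(y, (float, complex)) and np.isnan(y))
        if x_invalid and y_invalid:
            continue
        if x_invalid != y_invalid or x != y:
            return False
    return True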
Example #4
def test_sanitizing_2d():
    """Test if dataset cleanup gives expected results."""

    a = np.arange(2 * 4).astype(object).reshape(4, 2)
    a[1, :] = None
    a[3, :] = None
    a[2, -1] = None

    b = np.arange(2 * 4).astype(float).reshape(4, 2)
    b[1, :] = np.nan
    b[0, 0] = np.nan
    b[3, 0] = np.nan

    a_clean = np.vstack((a[0:1, :], a[2:, :]))
    b_clean = np.vstack((b[0:1, :], b[2:, :]))

    dd = DataDict(
        a=dict(values=a),
        b=dict(values=b, axes=['a']),
    )

    assert dd.validate()
    dd2 = dd.remove_invalid_entries()
    assert dd2.validate()
    assert dd2.shapes() == {'a': (3, 2), 'b': (3, 2)}
    assert num.arrays_equal(dd2.data_vals('a'), a_clean)
    assert num.arrays_equal(dd2.data_vals('b'), b_clean)
Example #5
def test_set_grid_with_order(qtbot):
    """Test making meshgrid when the internal axis order needs to be fixed."""

    DataGridder.useUi = False
    DataGridder.uiClass = None

    fc = linearFlowchart(('grid', DataGridder))
    node = fc.nodes()['grid']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vv = xx * yy * zz
    x1d, y1d, z1d = xx.flatten(), yy.flatten(), zz.flatten()
    v1d = vv.flatten()

    # construct the data dict; the axes of 'vals' are not listed in the
    # order in which the data was generated
    data = DataDict(x=dict(values=x1d),
                    y=dict(values=y1d),
                    z=dict(values=z1d),
                    vals=dict(values=v1d, axes=['y', 'z', 'x']))
    assert data.validate()

    # in the 1-d data, nothing unusual should happen
    fc.setInput(dataIn=data)
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        v1d,
    )

    # guessing the grid should work, and fix the wrong order
    node.grid = GridOption.guessShape, dict()
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv.transpose((1, 2, 0)),
    )
    assert fc.outputValues()['dataOut']['vals']['axes'] == ['y', 'z', 'x']

    # finally, specify manually. Omitting the inner axis order doesn't work
    node.grid = GridOption.specifyShape, dict(shape=(5, 2, 5))
    assert fc.outputValues()['dataOut'].data_vals('vals').shape == (5, 2, 5)
    assert not num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv.transpose((1, 2, 0)),
    )

    # but using the right inner axis order should do it
    node.grid = GridOption.specifyShape, dict(order=['x', 'y', 'z'],
                                              shape=(5, 5, 2))
    assert fc.outputValues()['dataOut'].data_vals('vals').shape == (5, 2, 5)
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv.transpose((1, 2, 0)),
    )
Example #6
def test_cropping2d():
    """Test basic data cropping of 2d grids"""
    arr = np.arange(16.).reshape(4, 4)
    arr[2:] = np.nan
    data = np.random.rand(4, 4)

    x, y, z = num.crop2d(arr, arr.T, data)
    assert num.arrays_equal(x, arr[:2, :2])
    assert num.arrays_equal(y, arr.T[:2, :2])
    assert num.arrays_equal(z, data[:2, :2])
Example #7
def test_array_equality():
    """Test if two arrays are correctly identified as having equal content"""

    a = np.arange(2 * 4).astype(object).reshape(4, 2)
    a[2, 0] = None
    b = np.arange(2 * 4).astype(np.complex128).reshape(4, 2)
    b[2, 0] = np.nan
    assert num.arrays_equal(a, b)

    a = np.arange(2 * 4).astype(object).reshape(4, 2)
    a[2, 0] = 0
    b = np.arange(2 * 4).astype(np.complex128).reshape(4, 2)
    b[2, 0] = np.nan
    assert not num.arrays_equal(a, b)
Example #8
def test_xy_selector_with_roles(qtbot):
    """Testing XY selector using the roles 'meta' property."""

    XYSelector.uiClass = None

    fc = linearFlowchart(('xysel', XYSelector))
    node = fc.nodes()['xysel']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vals = xx * yy * zz
    data = MeshgridDataDict(x=dict(values=xx),
                            y=dict(values=yy),
                            z=dict(values=zz),
                            vals=dict(values=vals, axes=['x', 'y', 'z']))
    assert data.validate()

    fc.setInput(dataIn=data)

    # this should return None, because no x/y axes were set.
    assert fc.outputValues()['dataOut'] is None

    # now select two axes; the remaining axis should be reduced by the default
    # element selection (index 0)
    node.xyAxes = ('x', 'y')

    assert num.arrays_equal(fc.outputValues()['dataOut'].data_vals('vals'),
                            vals[:, :, 0])
    assert node.dimensionRoles == {
        'x': 'x-axis',
        'y': 'y-axis',
        'z': (ReductionMethod.elementSelection, [], {
            'index': 0,
            'axis': 2
        })
    }

    # now set the role directly through the meta property
    node.dimensionRoles = {
        'x': 'y-axis',
        'y': (ReductionMethod.average, [], {}),
        'z': 'x-axis',
    }

    assert node.xyAxes == ('z', 'x')
    assert num.arrays_equal(fc.outputValues()['dataOut'].data_vals('vals'),
                            vals[:, :, :].mean(axis=1).transpose((1, 0)))
Example #9
def test_histogram_with_ui(qtbot):
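    """Test the histogram node when driven through its UI widgets."""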
    dataset = _make_testdata()
    assert dataset.validate()
    hist, edges = histogram(dataset.data_vals('noise'), axis=1, bins=15)

    Histogrammer.useUi = True
    fc = linearFlowchart(('h', Histogrammer))
    win = AutoPlotMainWindow(fc, loaderName=None, monitor=False)
    win.show()
    qtbot.addWidget(win)

    fc.setInput(dataIn=dataset)
    hnode = fc.nodes()['h']

    # emit the signal manually here, since that is what the node is connected
    # to; setting the value alone won't emit the change.
    hnode.ui.widget.nbins.setValue(15)
    hnode.ui.widget.nbins.editingFinished.emit()

    hnode.ui.widget.combo.setCurrentText('y')

    assert fc.outputValues()['dataOut'].dependents() == ['noise_count']
    assert fc.outputValues()['dataOut'].axes('noise_count') == \
        ['x', 'z', 'noise']
    assert arrays_equal(fc.outputValues()['dataOut']['noise_count']['values'],
                        hist)
Example #10
def test_array_reshape():
    """Test array reshaping with size adaption."""

    a = np.arange(10)
    out = num.array1d_to_meshgrid(a, (4, 4))
    assert out.shape == (4, 4)
    assert num.arrays_equal(out, np.append(a, 6 * [None]).reshape(4, 4))

    a = np.arange(10).astype(complex)
    out = num.array1d_to_meshgrid(a, (4, 4))
    assert out.shape == (4, 4)
    assert num.arrays_equal(out, np.append(a, 6 * [np.nan]).reshape(4, 4))

    a = np.arange(10).astype(float)
    out = num.array1d_to_meshgrid(a, (3, 3))
    assert out.shape == (3, 3)
    assert num.arrays_equal(out, a[:9].reshape(3, 3))
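
The reshaping above pads a too-short array or truncates a too-long one to fit the requested grid. A simplified sketch of that behavior (not the actual num.array1d_to_meshgrid; the real function apparently picks the fill value from the dtype, e.g. None for the integer case above and NaN for float/complex, which this sketch leaves to the caller):

import numpy as np

def reshape_with_size_adaption(a: np.ndarray, shape: tuple, fill=np.nan) -> np.ndarray:
    """Illustrative only: pad with `fill` (or truncate) so the flat array holds
    exactly np.prod(shape) elements, then reshape to the target grid."""
    n = int(np.prod(shape))
    if a.size < n:
        a = np.append(a, [fill] * (n - a.size))
    return a[:n].reshape(shape)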
Example #11
def test_sanitizing_1d():
    """Test if dataset cleanup gives expected results."""
    a = np.arange(10).astype(object)
    a[4:6] = None
    b = np.arange(10).astype(complex)
    b[4] = np.nan

    a_clean = np.hstack((a[:4], a[5:]))
    b_clean = np.hstack((b[:4], b[5:]))

    dd = DataDict(
        a=dict(values=a),
        b=dict(values=b, axes=['a']),
    )

    assert dd.validate()
    dd2 = dd.remove_invalid_entries()
    assert dd2.validate()
    assert num.arrays_equal(dd2.data_vals('a'), a_clean)
    assert num.arrays_equal(dd2.data_vals('b'), b_clean)
Example #12
def test_reorder():
    """Test reordering of axes."""

    a = np.arange(3)
    b = np.arange(5, 10)
    c = np.linspace(0, 1, 3)
    aa, bb, cc = np.meshgrid(a, b, c, indexing='ij')
    zz = aa + bb + cc

    dd = MeshgridDataDict(a=dict(values=aa),
                          b=dict(values=bb),
                          c=dict(values=cc),
                          z=dict(values=zz, axes=['a', 'b', 'c']))

    assert dd.validate()
    dd = dd.reorder_axes(c=0)
    assert dd.axes('z') == ['c', 'a', 'b']
    assert num.arrays_equal(dd.data_vals('a'), aa.transpose([2, 0, 1]))
    assert num.arrays_equal(dd.data_vals('b'), bb.transpose([2, 0, 1]))
    assert num.arrays_equal(dd.data_vals('c'), cc.transpose([2, 0, 1]))
    assert num.arrays_equal(dd.data_vals('z'), zz.transpose([2, 0, 1]))
Example #13
def test_basic_gridding(qtbot):
    """Test simple gridding tasks"""

    DataGridder.useUi = False
    DataGridder.uiClass = None

    fc = linearFlowchart(('grid', DataGridder))
    node = fc.nodes()['grid']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vv = xx * yy * zz
    x1d, y1d, z1d = xx.flatten(), yy.flatten(), zz.flatten()
    v1d = vv.flatten()
    data = DataDict(x=dict(values=x1d),
                    y=dict(values=y1d),
                    z=dict(values=z1d),
                    vals=dict(values=v1d, axes=['x', 'y', 'z']))
    assert data.validate()

    fc.setInput(dataIn=data)
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        v1d,
    )

    node.grid = GridOption.guessShape, dict()
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv,
    )

    node.grid = GridOption.specifyShape, dict(shape=(5, 5, 2))
    assert num.arrays_equal(
        fc.outputValues()['dataOut'].data_vals('vals'),
        vv,
    )
Example #14
def test_nontrivial_expansion():
    """test expansion when different dependents require different
    expansion of an axis."""

    a = np.arange(4)
    b = np.arange(4 * 2).reshape(4, 2)
    x = np.arange(4)
    y = np.arange(4 * 2).reshape(4, 2)

    dd = DataDict(a=dict(values=a),
                  b=dict(values=b),
                  x=dict(values=x, axes=['a']),
                  y=dict(values=y, axes=['a', 'b']))

    assert dd.validate()
    assert dd.is_expandable()

    dd_x = dd.extract('x').expand()
    assert num.arrays_equal(a, dd_x.data_vals('a'))

    dd_y = dd.extract('y').expand()
    assert num.arrays_equal(a.repeat(2), dd_y.data_vals('a'))
Example #15
def test_xy_selector(qtbot):
    """Basic XY selector node test."""

    XYSelector.uiClass = None

    fc = linearFlowchart(('xysel', XYSelector))
    node = fc.nodes()['xysel']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vals = xx * yy * zz
    data = MeshgridDataDict(x=dict(values=xx),
                            y=dict(values=yy),
                            z=dict(values=zz),
                            vals=dict(values=vals, axes=['x', 'y', 'z']))
    assert data.validate()

    fc.setInput(dataIn=data)

    # this should return None, because no x/y axes were set.
    assert fc.outputValues()['dataOut'] is None

    # now select two axes; the remaining axis should be reduced by the default
    # element selection (index 0)
    node.xyAxes = ('x', 'y')
    assert num.arrays_equal(fc.outputValues()['dataOut'].data_vals('vals'),
                            vals[:, :, 0])

    # try a different reduction on the third axis
    node.reductions = {'z': (ReductionMethod.average, [], {})}
    assert num.arrays_equal(fc.outputValues()['dataOut'].data_vals('vals'),
                            vals.mean(axis=-1))

    # Test transposing the data by flipping x/y
    node.xyAxes = ('y', 'x')
    assert num.arrays_equal(fc.outputValues()['dataOut'].data_vals('vals'),
                            vals.mean(axis=-1).transpose((1, 0)))
Example #16
def test_reduction(qtbot):
    """Test basic dimension reduction."""
    DimensionReducer.uiClass = None

    fc = linearFlowchart(('dim_red', DimensionReducer))
    node = fc.nodes()['dim_red']

    x = np.arange(5.0)
    y = np.linspace(0, 1, 5)
    z = np.arange(4.0, 6.0, 1.0)
    xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')
    vals = xx * yy * zz
    data = MeshgridDataDict(x=dict(values=xx),
                            y=dict(values=yy),
                            z=dict(values=zz),
                            vals=dict(values=vals, axes=['x', 'y', 'z']))
    assert data.validate()

    fc.setInput(dataIn=data)
    assert num.arrays_equal(fc.outputValues()['dataOut'].data_vals('vals'),
                            vals)

    node.reductions = {'y': (np.mean, [], {})}

    out = fc.outputValues()['dataOut']
    assert num.arrays_equal(vals.mean(axis=1), out.data_vals('vals'))
    assert out.axes('vals') == ['x', 'z']

    node.reductions = {
        'y': (ReductionMethod.elementSelection, [], {
            'index': 0
        }),
        'z': (ReductionMethod.average, )
    }

    out = fc.outputValues()['dataOut']
    assert num.arrays_equal(vals[:, 0, :].mean(axis=-1), out.data_vals('vals'))
    assert out.axes('vals') == ['x']
Example #17
def test_meshgrid_conversion():
    """Test making a meshgrid from a dataset"""

    a = np.linspace(0, 1, 11)
    b = np.arange(5)
    aa, bb = np.meshgrid(a, b, indexing='ij')
    zz = aa * bb

    dd = DataDict(
        a=dict(values=aa.reshape(-1)),
        b=dict(values=bb.reshape(-1)),
        z=dict(values=zz.reshape(-1), axes=['a', 'b']),
        __info__='some info',
    )

    dd2 = datadict_to_meshgrid(dd, target_shape=(11, 5))
    assert DataDictBase.same_structure(dd, dd2)
    assert num.arrays_equal(dd2.data_vals('a'), aa)
    assert num.arrays_equal(dd2.data_vals('z'), zz)

    dd2 = datadict_to_meshgrid(dd, target_shape=None)
    assert DataDictBase.same_structure(dd, dd2)
    assert num.arrays_equal(dd2.data_vals('a'), aa)
    assert num.arrays_equal(dd2.data_vals('z'), zz)

    # test the case where the inner/outer axis order of the flattened data
    # is swapped
    aa, bb = np.meshgrid(a, b, indexing='xy')
    zz = aa * bb

    dd = DataDict(
        a=dict(values=aa.reshape(-1)),
        b=dict(values=bb.reshape(-1)),
        z=dict(values=zz.reshape(-1), axes=['a', 'b']),
        __info__='some info',
    )

    dd2 = datadict_to_meshgrid(dd,
                               target_shape=(5, 11),
                               inner_axis_order=['b', 'a'])
    assert DataDictBase.same_structure(dd, dd2)
    assert num.arrays_equal(dd2.data_vals('a'), np.transpose(aa, (1, 0)))
    assert num.arrays_equal(dd2.data_vals('z'), np.transpose(zz, (1, 0)))

    dd2 = datadict_to_meshgrid(dd, target_shape=None)
    assert DataDictBase.same_structure(dd, dd2)
    assert num.arrays_equal(dd2.data_vals('a'), np.transpose(aa, (1, 0)))
    assert num.arrays_equal(dd2.data_vals('z'), np.transpose(zz, (1, 0)))
Example #18
def test_real_histogram(qtbot):
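    """Test the histogram node (no UI) against a directly computed histogram."""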
    dataset = _make_testdata()
    assert dataset.validate()
    hist, edges = histogram(dataset.data_vals('noise'), axis=0, bins=10)

    Histogrammer.useUi = False
    fc = linearFlowchart(('h', Histogrammer))
    fc.setInput(dataIn=dataset)
    assert fc.outputValues()['dataOut'] == dataset

    fc.nodes()['h'].nbins = 10
    fc.nodes()['h'].histogramAxis = 'x'
    assert fc.outputValues()['dataOut'].dependents() == ['noise_count']
    assert fc.outputValues()['dataOut'].axes('noise_count') == \
        ['y', 'z', 'noise']
    assert arrays_equal(fc.outputValues()['dataOut']['noise_count']['values'],
                        hist)
Example #19
def datasets_are_equal(a: DataDictBase, b: DataDictBase,
                       ignore_meta: bool = False) -> bool:
    """Check whether two datasets are equal.

    Compares type, structure, and content of all fields.

    :param a: First dataset.
    :param b: Second dataset.
    :param ignore_meta: If ``True``, do not verify if metadata matches.
    :returns: ``True`` or ``False``.
    """

    if type(a) is not type(b):
        return False

    if not a.same_structure(a, b):
        return False

    if not ignore_meta:
        # are all meta data of a also in b, and are they the same value?
        for k, v in a.meta_items():
            if k not in [kk for kk, vv in b.meta_items()]:
                return False
            elif b.meta_val(k) != v:
                return False

        # are all meta data of b also in a?
        for k, v in b.meta_items():
            if k not in [kk for kk, vv in a.meta_items()]:
                return False

    # check all data fields in a
    for dn, dv in a.data_items():

        # are all fields also present in b?
        if dn not in [dnn for dnn, dvv in b.data_items()]:
            return False

        # check if data is equal
        if not num.arrays_equal(
                np.array(a.data_vals(dn)),
                np.array(b.data_vals(dn)),
        ):
            return False

        if not ignore_meta:
            # check meta data
            for k, v in a.meta_items(dn):
                if k not in [kk for kk, vv in b.meta_items(dn)]:
                    return False
                elif v != b.meta_val(k, dn):
                    return False

    # only thing left to check is whether there are items in b but not a
    for dn, dv in b.data_items():
        if dn not in [dnn for dnn, dvv in a.data_items()]:
            return False

        if not ignore_meta:
            for k, v in b.meta_items(dn):
                if k not in [kk for kk, vv in a.meta_items(dn)]:
                    return False

    return True
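
A possible way to exercise this helper (illustrative contents only; DataDict, np, and the __info__ meta key are used as in the examples above, and it is assumed that structure comparison ignores metadata):

def test_datasets_are_equal_sketch():
    """Illustrative only: identical data with different metadata compares
    unequal unless metadata comparison is switched off."""
    dd1 = DataDict(
        x=dict(values=np.arange(3)),
        y=dict(values=np.arange(3) * 2.0, axes=['x']),
        __info__='run A',
    )
    dd2 = DataDict(
        x=dict(values=np.arange(3)),
        y=dict(values=np.arange(3) * 2.0, axes=['x']),
        __info__='run B',
    )
    assert dd1.validate() and dd2.validate()
    assert not datasets_are_equal(dd1, dd2)
    assert datasets_are_equal(dd1, dd2, ignore_meta=True)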
Example #20
    def __eq__(self, other: 'DataDictBase'):
        """Check for content equality of two datadicts."""

        if not self.same_structure(self, other):
            # print('structure')
            return False

        for k, v in self.meta_items():
            if k not in [kk for kk, vv in other.meta_items()]:
                # print(f'{k} not in {other}')
                return False
            elif other.meta_val(k) != v:
                # print(f'{other.meta_val(k)} != {v}')
                return False

        for k, v in other.meta_items():
            if k not in [kk for kk, vv in self.meta_items()]:
                # print(f'{k} not in {self}')
                return False

        for dn, dv in self.data_items():
            # print(dn)
            if dn not in [dnn for dnn, dvv in other.data_items()]:
                # print(f"{dn} not in {other}")
                return False

            if self[dn].get('unit', '') != other[dn].get('unit', ''):
                # print(f"different units for {dn}")
                return False

            if self[dn].get('axes', []) != other[dn].get('axes', []):
                # print(f"different axes for {dn}")
                return False

            if not num.arrays_equal(
                    np.array(self.data_vals(dn)),
                    np.array(other.data_vals(dn)),
            ):
                # print(f"different data for {dn}")
                return False

            for k, v in self.meta_items(dn):
                if k not in [kk for kk, vv in other.meta_items(dn)]:
                    # print(f"{dn}: {k} not in {other}")
                    return False
                elif v != other.meta_val(k, dn):
                    # print(f"{v} != {other.meta_val(k, dn)}")
                    return False

        for dn, dv in other.data_items():
            # print(dn)
            if dn not in [dnn for dnn, dvv in self.data_items()]:
                # print(f"{dn} not in {other}")
                return False

            for k, v in other.meta_items(dn):
                if k not in [kk for kk, vv in self.meta_items(dn)]:
                    # print(f"{dn}: {k} not in {other}")
                    return False

        return True
Example #21
    def __eq__(self, other: object) -> bool:
        """Check for content equality of two datadicts."""

        # TODO: require a version that ignores metadata.
        # FIXME: proper comparison of arrays for metadata.
        # FIXME: arrays can be equal even if dtypes are not

        if not isinstance(other, DataDictBase):
            return NotImplemented

        if not self.same_structure(self, other):
            # print('structure')
            return False

        for k, v in self.meta_items():
            if k not in [kk for kk, vv in other.meta_items()]:
                # print(f'{k} not in {other}')
                return False
            elif other.meta_val(k) != v:
                # print(f'{other.meta_val(k)} != {v}')
                return False

        for k, v in other.meta_items():
            if k not in [kk for kk, vv in self.meta_items()]:
                # print(f'{k} not in {self}')
                return False

        for dn, dv in self.data_items():
            # print(dn)
            if dn not in [dnn for dnn, dvv in other.data_items()]:
                # print(f"{dn} not in {other}")
                return False

            if self[dn].get('unit', '') != other[dn].get('unit', ''):
                # print(f"different units for {dn}")
                return False

            if self[dn].get('label', '') != other[dn].get('label', ''):
                # print(f"different labels for {dn}")
                return False

            if self[dn].get('axes', []) != other[dn].get('axes', []):
                # print(f"different axes for {dn}")
                return False

            if not num.arrays_equal(
                    np.array(self.data_vals(dn)),
                    np.array(other.data_vals(dn)),
            ):
                # print(f"different data for {dn}")
                return False

            for k, v in self.meta_items(dn):
                if k not in [kk for kk, vv in other.meta_items(dn)]:
                    # print(f"{dn}: {k} not in {other}")
                    return False
                elif v != other.meta_val(k, dn):
                    # print(f"{v} != {other.meta_val(k, dn)}")
                    return False

        for dn, dv in other.data_items():
            # print(dn)
            if dn not in [dnn for dnn, dvv in self.data_items()]:
                # print(f"{dn} not in {other}")
                return False

            for k, v in other.meta_items(dn):
                if k not in [kk for kk, vv in self.meta_items(dn)]:
                    # print(f"{dn}: {k} not in {other}")
                    return False

        return True
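
In use this is just the == operator; for instance (illustrative values, assuming the dtype tolerance of num.arrays_equal shown in the tests above):

dd1 = DataDict(x=dict(values=np.arange(3), unit='V'),
               y=dict(values=np.arange(3) * 2.0, axes=['x']))
dd2 = DataDict(x=dict(values=np.arange(3).astype(float), unit='V'),
               y=dict(values=np.arange(3) * 2.0, axes=['x']))
assert dd1 == dd2                          # same content; dtype difference tolerated
assert (dd1 == 'not a datadict') is False  # NotImplemented -> Python falls back to False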
Example #22
def combine_datadicts(*dicts: DataDict) -> Union[DataDictBase, DataDict]:
    """
    Try to make one datadict out of multiple.

    Basic rules:

    - we try to maintain the input type
    - return type is 'downgraded' to DataDictBase if the contents are not
      compatible (i.e., different numbers of records in the inputs)

    :returns: combined data
    """

    # TODO: deal correctly with MeshGridData when combined with other types
    # TODO: should we strictly copy all values?
    # TODO: we should try to consolidate axes as much as possible. Currently
    #   axes in the return can be separated even if they match (caused
    #   by earlier mismatches)

    ret = None
    rettype = None

    for d in dicts:
        if ret is None:
            ret = d.copy()
            rettype = type(d)

        else:

            # if we don't have a well defined number of records anymore,
            # need to revert the type to DataDictBase
            if hasattr(d, 'nrecords') and hasattr(ret, 'nrecords'):
                if d.nrecords() != ret.nrecords():
                    rettype = DataDictBase
            else:
                rettype = DataDictBase
            ret = rettype(**ret)

            # First, parse the axes in the to-be-added ddict. If dimensions
            # with the same names are already present in the current return
            # ddict and are not compatible with what is to be added, rename
            # the incoming dimension.
            ax_map = {}
            for d_ax in d.axes():
                if d_ax in ret.axes():
                    if num.arrays_equal(d.data_vals(d_ax),
                                        ret.data_vals(d_ax)):
                        ax_map[d_ax] = d_ax
                    else:
                        newax = _find_replacement_name(ret, d_ax)
                        ax_map[d_ax] = newax
                        ret[newax] = d[d_ax]
                elif d_ax in ret.dependents():
                    newax = _find_replacement_name(ret, d_ax)
                    ax_map[d_ax] = newax
                    ret[newax] = d[d_ax]
                else:
                    ax_map[d_ax] = d_ax
                    ret[d_ax] = d[d_ax]

            for d_dep in d.dependents():
                if d_dep in ret:
                    newdep = _find_replacement_name(ret, d_dep)
                else:
                    newdep = d_dep

                dep_axes = [ax_map[ax] for ax in d[d_dep]['axes']]
                ret[newdep] = d[d_dep]
                ret[newdep]['axes'] = dep_axes

    if ret is None:
        ret = DataDict()
    else:
        ret.validate()

    return ret
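
A short usage sketch (illustrative data; names as used above; assumes the shared axis values match so no renaming is triggered):

def combine_datadicts_sketch():
    """Illustrative only: merge two datadicts that share the axis 'x'."""
    x = np.linspace(0, 1, 5)
    d1 = DataDict(x=dict(values=x),
                  y=dict(values=x ** 2, axes=['x']))
    d2 = DataDict(x=dict(values=x),
                  z=dict(values=np.sin(x), axes=['x']))
    combined = combine_datadicts(d1, d2)
    assert isinstance(combined, DataDict)  # record counts match, so the type is kept
    assert set(combined.dependents()) == {'y', 'z'}
    assert combined.axes('y') == ['x'] and combined.axes('z') == ['x']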