Example #1
def test_gh_4176():
    from dask.sharedict import ShareDict

    def foo(A):
        return A[None, ...]

    A = da.ones(shape=(10, 20, 4), chunks=(2, 5, 4))

    name = 'D'

    dsk = blockwise(
        foo, name, ("nsrc", "ntime", "nbl", "npol"),
        A.name, ("ntime", "nbl", "npol"),
        new_axes={"nsrc": 1},
        numblocks={a.name: a.numblocks for a in (A,)}
    )

    array_dsk = ShareDict()
    array_dsk.update(dsk)
    array_dsk.update(A.__dask_graph__())

    chunks = ((1,),) + A.chunks

    D = da.Array(array_dsk, name, chunks, dtype=A.dtype)
    D.sum(axis=0).compute()
Example #2
def test_structure():
    s = ShareDict()
    s.update(a)
    s.update(b)
    s.update(c)

    assert all(any(d is x for d in s.dicts.values())
               for x in [a, b, c])
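
dask.sharedict.ShareDict was a read-only Mapping that held several sub-dicts by reference rather than copying their contents; newer dask releases replaced it with HighLevelGraph. The tests in this listing also reference module-level fixtures a, b and c that the extracts omit. Below is a hypothetical minimal sketch of the behaviour these tests exercise (.dicts, .order, update_with_key), together with illustrative fixtures consistent with assertions such as s['x'] == 1. It is not dask's actual implementation; the class name ShareDictSketch and the fixture values are assumptions.

from collections.abc import Mapping
import uuid


class ShareDictSketch(Mapping):
    """Hypothetical stand-in for dask.sharedict.ShareDict: a read-only
    Mapping view over several sub-dicts, held by reference in .dicts
    and ordered by .order."""

    def __init__(self):
        self.dicts = {}   # key -> sub-dict, stored by reference
        self.order = []   # keys, oldest first; re-adding a key moves it last

    def update_with_key(self, d, key=None):
        if isinstance(d, ShareDictSketch):
            # Merging another ShareDict re-uses its sub-dicts by identity
            for k in d.order:
                self.update_with_key(d.dicts[k], key=k)
            return
        if key is None:
            key = uuid.uuid4().hex
        if key in self.dicts:
            self.order.remove(key)
        self.dicts[key] = d
        self.order.append(key)

    update = update_with_key

    def __getitem__(self, key):
        # Look the key up in each sub-dict in turn
        for d in self.dicts.values():
            if key in d:
                return d[key]
        raise KeyError(key)

    def __iter__(self):
        seen = set()
        for k in self.order:
            for key in self.dicts[k]:
                if key not in seen:
                    seen.add(key)
                    yield key

    def __len__(self):
        return sum(1 for _ in self)


# Illustrative fixtures (the real a, b, c are omitted from the listing;
# these are merely consistent with assertions such as s['x'] == 1):
a = {'x': 1}
b = {'y': 2}
c = {'z': 3}

s = ShareDictSketch()
s.update_with_key(a, key='a')
assert isinstance(s, Mapping) and s['x'] == 1 and s.dicts['a'] is a

Because Mapping provides no __setitem__, item assignment on the sketch raises TypeError, matching the (NotImplementedError, TypeError) pair accepted by test_core below.
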
Example #3
def convert_ldicts_to_sdict(blocker_list):
    s = ShareDict()
    for blocker in blocker_list:
        for k, v in blocker.items():  # dict.iteritems() is Python 2 only
            s.update_with_key({k: v}, key=k)
    return s
Example #4
def __init__(self, dask, name, n_chunk, slice_dim):
    # Normalise a plain task graph into a ShareDict keyed by the array name
    if not isinstance(dask, ShareDict):
        s = ShareDict()
        s.update_with_key(dask, key=name)
        dask = s
    self.dask = dask
    self.name = name
    self.n_chunk = n_chunk
    self.slice_dim = slice_dim
Example #5
def test_core():
    s = ShareDict()
    assert isinstance(s, Mapping)

    s.update(a)
    s.update(b)

    assert s['x'] == 1
    with pytest.raises(KeyError):
        s['abc']

    with pytest.raises((NotImplementedError, TypeError)):
        s['abc'] = 123
Example #6
def test_core():
    s = ShareDict()
    assert isinstance(s, Mapping)

    s.update(a)
    s.update(b)

    assert s['x'] == 1
    with pytest.raises(KeyError):
        s['abc']

    with pytest.raises((NotImplementedError, TypeError)):
        s['abc'] = 123
Example #7
def test_keys_items():
    s = ShareDict()
    s.update_with_key(a, key='a')
    s.update_with_key(b, key='b')
    s.update_with_key(c, key='c')

    d = merge(a, b, c)

    for fn in [dict, set, len]:
        assert fn(s) == fn(d)

    for fn in [lambda x: x.values(), lambda x: x.keys(), lambda x: x.items()]:
        assert set(fn(s)) == set(fn(d))
Example #8
def test_ensure_dict():
    d = {'x': 1}
    assert ensure_dict(d) is d
    sd = ShareDict()
    sd.update(d)
    assert type(ensure_dict(sd)) is dict
    assert ensure_dict(sd) == d

    class mydict(dict):
        pass

    md = mydict()
    md['x'] = 1
    assert type(ensure_dict(md)) is dict
    assert ensure_dict(md) == d
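
For context, dask's ensure_dict utility returns plain dicts unchanged and copies any other Mapping into a new plain dict. A minimal sketch of the semantics this test exercises (the name ensure_dict_sketch is hypothetical; this is not dask's exact implementation) might be:

from collections.abc import Mapping


def ensure_dict_sketch(d: Mapping) -> dict:
    # Plain dicts pass through by identity; any other Mapping,
    # including dict subclasses and ShareDict, is copied into a
    # new plain dict.
    if type(d) is dict:
        return d
    return dict(d)
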
Example #9
def test_ensure_dict():
    d = {'x': 1}
    assert ensure_dict(d) is d
    sd = ShareDict()
    sd.update(d)
    assert type(ensure_dict(sd)) is dict
    assert ensure_dict(sd) == d

    class mydict(dict):
        pass

    md = mydict()
    md['x'] = 1
    assert type(ensure_dict(md)) is dict
    assert ensure_dict(md) == d
Example #10
def test_update_with_sharedict():
    s = ShareDict()
    s.update_with_key(a, key='a')
    s.update_with_key(b, key='b')
    s.update_with_key(c, key='c')

    d = {'z': 5}

    s2 = ShareDict()
    s2.update_with_key(a, key='a')
    s2.update_with_key(d, key='d')

    s.update(s2)

    assert s.dicts['a'] is s2.dicts['a']  # sub-dict 'a' is shared by reference
Example #11
def test_structure_2():
    s = ShareDict()
    s.update_with_key(a, key='a')
    s.update_with_key(b, key='b')
    s.update_with_key(c, key='c')

    assert s.order == ['a', 'b', 'c']

    s.update_with_key(b, key='b')

    assert s.order == ['a', 'c', 'b']
Example #12
def test_structure_2():
    s = ShareDict()
    s.update_with_key(a, key="a")
    s.update_with_key(b, key="b")
    s.update_with_key(c, key="c")

    assert s.order == ["a", "b", "c"]

    s.update_with_key(b, key="b")

    assert s.order == ["a", "c", "b"]
Example #13
def test_structure():
    s = ShareDict()
    s.update(a)
    s.update(b)
    s.update(c)

    assert all(any(d is x for d in s.dicts.values()) for x in [a, b, c])
Example #14
def test_gh_4176():
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        from dask.sharedict import ShareDict

    def foo(A):
        return A[None, ...]

    A = da.ones(shape=(10, 20, 4), chunks=(2, 5, 4))

    name = 'D'

    dsk = blockwise(foo,
                    name, ("nsrc", "ntime", "nbl", "npol"),
                    A.name, ("ntime", "nbl", "npol"),
                    new_axes={"nsrc": 1},
                    numblocks={a.name: a.numblocks
                               for a in (A, )})

    array_dsk = ShareDict()
    array_dsk.update(dsk)
    array_dsk.update(A.__dask_graph__())

    chunks = ((1, ), ) + A.chunks

    D = da.Array(array_dsk, name, chunks, dtype=A.dtype)
    D.sum(axis=0).compute()
Example #15
def test_structure_2():
    s = ShareDict()
    s.update_with_key(a, key='a')
    s.update_with_key(b, key='b')
    s.update_with_key(c, key='c')

    assert s.order == ['a', 'b', 'c']

    s.update_with_key(b, key='b')

    assert s.order == ['a', 'c', 'b']
Example #16
def test_keys_items():
    s = ShareDict()
    s.update_with_key(a, key='a')
    s.update_with_key(b, key='b')
    s.update_with_key(c, key='c')

    d = merge(a, b, c)

    for fn in [dict, set, len]:
        assert fn(s) == fn(d)

    for fn in [lambda x: x.values(), lambda x: x.keys(), lambda x: x.items()]:
        assert set(fn(s)) == set(fn(d))
Example #17
def predict_vis(time_index, antenna1, antenna2,
                dde1_jones=None, source_coh=None, dde2_jones=None,
                die1_jones=None, base_vis=None, die2_jones=None):

    have_a1 = dde1_jones is not None
    have_a2 = dde2_jones is not None
    have_bl = source_coh is not None
    have_g1 = die1_jones is not None
    have_coh = base_vis is not None
    have_g2 = die2_jones is not None

    if have_a1 ^ have_a2:
        raise ValueError("Both dde1_jones and dde2_jones "
                         "must be present or absent")

    have_ants = have_a1 and have_a2

    if have_ants:
        if dde1_jones.shape[2] != dde1_jones.chunks[2][0]:
            raise ValueError("Subdivision of antenna dimension into "
                             "multiple chunks is not supported.")

        if dde2_jones.shape[2] != dde2_jones.chunks[2][0]:
            raise ValueError("Subdivision of antenna dimension into "
                             "multiple chunks is not supported.")

        if dde1_jones.chunks != dde2_jones.chunks:
            raise ValueError("dde1_jones.chunks != dde2_jones.chunks")

        if len(dde1_jones.chunks[1]) != len(time_index.chunks[0]):
            raise ValueError("Number of row chunks (%s) does not equal "
                             "number of time chunks (%s)." %
                             (time_index.chunks[0], dde1_jones.chunks[1]))

    if have_g1 ^ have_g2:
        raise ValueError("Both die1_jones and die2_jones "
                         "must be present or absent")

    have_dies = have_g1 and have_g2

    if have_dies:
        if die1_jones.shape[1] != die1_jones.chunks[1][0]:
            raise ValueError("Subdivision of antenna dimension into "
                             "multiple chunks is not supported.")

        if die2_jones.shape[1] != die2_jones.chunks[1][0]:
            raise ValueError("Subdivision of antenna dimension into "
                             "multiple chunks is not supported.")

        if die1_jones.chunks != die2_jones.chunks:
            raise ValueError("die1_jones.chunks != die2_jones.chunks")

        if len(die1_jones.chunks[0]) != len(time_index.chunks[0]):
            raise ValueError("Number of row chunks (%s) does not equal "
                             "number of time chunks (%s)." %
                             (time_index.chunks[0], die1_jones.chunks[0]))

    # Generate strings for the correlation dimensions
    if have_ants:
        cdims = tuple("corr-%d" % i for i in range(len(dde1_jones.shape[4:])))
    elif have_bl:
        cdims = tuple("corr-%d" % i for i in range(len(source_coh.shape[3:])))
    elif have_dies:
        cdims = tuple("corr-%d" % i for i in range(len(die1_jones.shape[3:])))
    else:
        raise ValueError("Missing both antenna and baseline jones terms")

    # Infer the output dtype
    dtype_arrays = [dde1_jones, source_coh, dde2_jones, die1_jones, die2_jones]
    out_dtype = np.result_type(*(np.dtype(a.dtype.name)
                                 for a in dtype_arrays if a is not None))

    # In the case of predict_vis, the "row" and "time" dimensions
    # are intimately related -- a contiguous series of rows
    # are related to a contiguous series of timesteps.
    # This means that the number of chunks of these
    # two dimensions must match even though the chunk sizes may not.
    # da.core.atop insists on matching chunk sizes.
    # For this reason, we use the lower level da.core.top and
    # substitute "row" for "time" in arrays such as dde1_jones
    # and die1_jones.
    token = da.core.tokenize(time_index, antenna1, antenna2,
                             dde1_jones, source_coh, dde2_jones, base_vis)

    ajones_dims = ("src", "row", "ant", "chan") + cdims
    gjones_dims = ("row", "ant", "chan") + cdims

    # Setup
    # 1. Optional top arguments
    # 2. Optional numblocks kwarg
    # 3. dask graph inputs
    array_dsk = ShareDict()
    top_args = [time_index.name, ("row",),
                antenna1.name, ("row",),
                antenna2.name, ("row",)]
    numblocks = {
        time_index.name: time_index.numblocks,
        antenna1.name: antenna1.numblocks,
        antenna2.name: antenna2.numblocks
    }

    # Merge input graphs into the top graph
    array_dsk.update(time_index.__dask_graph__())
    array_dsk.update(antenna1.__dask_graph__())
    array_dsk.update(antenna2.__dask_graph__())

    # Handle presence/absence of dde1_jones
    if have_ants:
        top_args.extend([dde1_jones.name, ajones_dims])
        numblocks[dde1_jones.name] = dde1_jones.numblocks
        array_dsk.update(dde1_jones.__dask_graph__())
        other_chunks = dde1_jones.chunks[3:]
        src_chunks = dde1_jones.chunks[0]
    else:
        top_args.extend([None, None])

    # Handle presence/absence of source_coh
    if have_bl:
        top_args.extend([source_coh.name, ("src", "row", "chan") + cdims])
        numblocks[source_coh.name] = source_coh.numblocks
        other_chunks = source_coh.chunks[2:]
        src_chunks = source_coh.chunks[0]
        array_dsk.update(source_coh.__dask_graph__())
    else:
        top_args.extend([None, None])

    # Handle presence/absence of dde2_jones
    if have_ants:
        top_args.extend([dde2_jones.name, ajones_dims])
        numblocks[dde2_jones.name] = dde2_jones.numblocks
        array_dsk.update(dde2_jones.__dask_graph__())
        other_chunks = dde2_jones.chunks[3:]
        src_chunks = dde1_jones.chunks[0]
    else:
        top_args.extend([None, None])

    # die1_jones, base_vis and die2_jones absent for this part of the graph
    top_args.extend([None, None, None, None, None, None])

    assert len(top_args) // 2 == 9, len(top_args) // 2

    name = "-".join(("predict_vis", token))
    dsk = da.core.top(_predict_coh_wrapper,
                      name, ("src", "row", "chan") + cdims,
                      *top_args,
                      numblocks=numblocks)

    array_dsk.update(dsk)

    # Infer output chunk sizes from the source and correlation
    # chunks gathered above
    chunks = ((1,)*len(src_chunks), time_index.chunks[0],) + other_chunks

    sum_coherencies = da.Array(array_dsk, name, chunks, dtype=out_dtype)
    sum_coherencies = sum_coherencies.sum(axis=0)

    if have_coh:
        sum_coherencies += base_vis

    if not have_dies:
        return sum_coherencies

    # Now apply any Direction Independent Effect Terms

    # Setup
    # 1. Optional top arguments
    # 2. Optional numblocks kwarg
    # 3. dask graph inputs
    array_dsk = ShareDict()
    top_args = [time_index.name, ("row",),
                antenna1.name, ("row",),
                antenna2.name, ("row",)]
    numblocks = {
        time_index.name: time_index.numblocks,
        antenna1.name: antenna1.numblocks,
        antenna2.name: antenna2.numblocks
    }

    array_dsk.update(time_index.__dask_graph__())
    array_dsk.update(antenna1.__dask_graph__())
    array_dsk.update(antenna2.__dask_graph__())

    # dde1_jones, source_coh and dde2_jones are absent for this part of the graph
    top_args.extend([None, None, None, None, None, None])

    top_args.extend([die1_jones.name, gjones_dims])
    top_args.extend([sum_coherencies.name, ("row", "chan") + cdims])
    top_args.extend([die2_jones.name, gjones_dims])
    numblocks[die1_jones.name] = die1_jones.numblocks
    numblocks[sum_coherencies.name] = sum_coherencies.numblocks
    numblocks[die2_jones.name] = die2_jones.numblocks
    array_dsk.update(die1_jones.__dask_graph__())
    array_dsk.update(sum_coherencies.__dask_graph__())
    array_dsk.update(die2_jones.__dask_graph__())

    assert len(top_args) // 2 == 9

    token = da.core.tokenize(time_index, antenna1, antenna2,
                             die1_jones, sum_coherencies, die2_jones)
    name = '-'.join(("predict_vis", token))
    dsk = da.core.top(_predict_dies_wrapper,
                      name, ("row", "chan") + cdims,
                      *top_args, numblocks=numblocks)
    array_dsk.update(dsk)

    chunks = (time_index.chunks[0],) + other_chunks

    return da.Array(array_dsk, name, chunks, dtype=out_dtype)
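
Assuming the enclosing module (which defines the _predict_coh_wrapper and _predict_dies_wrapper callables referenced above) is importable, a hypothetical call supplying only a source coherency term could look like the sketch below; all names, shapes and chunkings are illustrative:

import numpy as np
import dask.array as da

nsrc, nrow, nchan, ncorr = 3, 10, 4, 2

# Two row chunks matching two time chunks in count (sizes may differ)
time_index = da.from_array(np.repeat(np.arange(2), 5), chunks=5)
antenna1 = da.zeros(nrow, chunks=5, dtype=np.int32)
antenna2 = da.ones(nrow, chunks=5, dtype=np.int32)
source_coh = da.ones((nsrc, nrow, nchan, ncorr),
                     chunks=(1, 5, nchan, ncorr),
                     dtype=np.complex64)

vis = predict_vis(time_index, antenna1, antenna2, source_coh=source_coh)
assert vis.shape == (nrow, nchan, ncorr)
# vis.compute() would require the real wrapper implementations
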
Example #18
def test_update_with_sharedict():
    s = ShareDict()
    s.update_with_key(a, key="a")
    s.update_with_key(b, key="b")
    s.update_with_key(c, key="c")

    d = {"z": 5}

    s2 = ShareDict()
    s2.update_with_key(a, key="a")
    s2.update_with_key(d, key="d")

    s.update(s2)

    assert s.dicts["a"] is s.dicts["a"]
Example #19
def test_update_with_sharedict():
    s = ShareDict()
    s.update_with_key(a, key='a')
    s.update_with_key(b, key='b')
    s.update_with_key(c, key='c')

    d = {'z': 5}

    s2 = ShareDict()
    s2.update_with_key(a, key='a')
    s2.update_with_key(d, key='d')

    s.update(s2)

    assert s.dicts['a'] is s2.dicts['a']  # sub-dict 'a' is shared by reference