Example #1
    def test_write_3d(self):
        h5py = import_or_skip('h5py')
        shape = (4, 5, 3)
        source = np.random.random(shape)

        dac = Context(self.client)
        dist = {0: 'b', 1: 'c', 2: 'n'}
        da = dac.empty(shape, dist=dist)

        for i in range(shape[0]):
            for j in range(shape[1]):
                for k in range(shape[2]):
                    da[i, j, k] = source[i, j, k]

        output_path = temp_filepath('.hdf5')

        try:
            dac.save_hdf5(output_path, da, mode='w')

            self.assertTrue(os.path.exists(output_path))

            with h5py.File(output_path, 'r') as fp:
                self.assertTrue("buffer" in fp)
                assert_allclose(source, fp["buffer"])

        finally:
            if os.path.exists(output_path):
                os.remove(output_path)
Example #2
 def test_load_nu(self):
     distribution = Distribution.from_dim_data_per_rank(self.context,
                                                        nu_test_data)
     da = self.context.load_npy(self.output_path, distribution)
     for i in range(da.shape[0]):
         for j in range(da.shape[1]):
             self.assertEqual(da[i, j], self.expected[i, j])
Example #3
 def test_from_global_dim_data_bc(self):
     """ Test creation of a block-cyclic array. """
     rows, cols = 5, 9
     global_dim_data = (
             # dim 0
             {
                 'dist_type': 'c',
                 'proc_grid_size': 2,
                 'size': rows,
                 'block_size': 2,
             },
             # dim 1
             {
                 'dist_type': 'c',
                 'proc_grid_size': 2,
                 'size': cols,
                 'block_size': 2,
             },)
     distribution = Distribution(self.context, global_dim_data)
     distarr = DistArray(distribution, dtype=int)
     for i in range(rows):
         for j in range(cols):
             distarr[i, j] = i*cols + j
     las = distarr.get_localarrays()
     local_shapes = [la.local_shape for la in las]
     self.assertSequenceEqual(local_shapes, [(3,5), (3,4), (2,5), (2,4)])
Example #4
    def test_set_and_getitem_block_dist(self):
        size = 10
        dap = self.dac.empty((size,), dist={0: 'b'})

        for val in range(size):
            dap[val] = val

        for val in range(size):
            self.assertEqual(dap[val], val)
Example #5
 def test_2D_cc(self):
     nrows, ncols = 3, 5
     nprocs_per_dim = 2
     cm = Distribution(self.context, (nrows, ncols), ("c", "c"), (nprocs_per_dim, nprocs_per_dim))
     for r in range(nrows):
         for c in range(ncols):
             rank = (r % nprocs_per_dim) * nprocs_per_dim + (c % nprocs_per_dim)
             actual = cm.owning_ranks((r, c))
             self.assertSequenceEqual(actual, [rank])
Example #6
 def test_2D_cc(self):
     nrows, ncols = 3, 5
     nprocs_per_dim = 2
     cm = client_map.Distribution.from_shape(
             self.ctx, (nrows, ncols), ('c', 'c'),
             (nprocs_per_dim, nprocs_per_dim))
     for r in range(nrows):
         for c in range(ncols):
             rank = (r % nprocs_per_dim) * nprocs_per_dim + (c % nprocs_per_dim)
             actual = cm.owning_ranks((r,c))
             self.assertSequenceEqual(actual, [rank])
Example #7
 def test_2D_bb(self):
     nrows, ncols = 3, 5
     nprocs_per_dim = 2
     cm = Distribution(self.context, (nrows, ncols), ("b", "b"), (nprocs_per_dim, nprocs_per_dim))
     row_chunks = nrows // nprocs_per_dim + 1
     col_chunks = ncols // nprocs_per_dim + 1
     for r in range(nrows):
         for c in range(ncols):
             rank = (r // row_chunks) * nprocs_per_dim + (c // col_chunks)
             actual = cm.owning_ranks((r, c))
             self.assertSequenceEqual(actual, [rank])
Example #8
    def test_set_and_getitem_cyclic_dist(self):
        size = 10
        dap = self.dac.empty((size,), dist={0: 'c'})

        for val in range(size):
            dap[val] = val
            self.assertEqual(dap[val], val)

        for i in range(1, size + 1):
            dap[-i] = i
            self.assertEqual(dap[-i], i)
Example #9
 def test_from_global_dim_data_uu(self):
     rows = 6
     cols = 20
     row_ixs = numpy.random.permutation(range(rows))
     col_ixs = numpy.random.permutation(range(cols))
     row_indices = [row_ixs[: rows // 2], row_ixs[rows // 2 :]]
     col_indices = [col_ixs[: cols // 4], col_ixs[cols // 4 :]]
     glb_dim_data = ({"dist_type": "u", "indices": row_indices}, {"dist_type": "u", "indices": col_indices})
     distribution = Distribution.from_global_dim_data(self.context, glb_dim_data)
     distarr = DistArray(distribution, dtype=int)
     distarr.toarray()
Example #10
 def test_2D_cc(self):
     nrows, ncols = 3, 5
     nprocs_per_dim = 2
     cm = Distribution(self.context, (nrows, ncols), ('c', 'c'),
                       (nprocs_per_dim, nprocs_per_dim))
     for r in range(nrows):
         for c in range(ncols):
             rank = ((r % nprocs_per_dim) * nprocs_per_dim +
                     (c % nprocs_per_dim))
             actual = cm.owning_ranks((r, c))
             self.assertSequenceEqual(actual, [rank])
Example #11
    def test_set_and_getitem_cyclic_dist(self):
        size = 10
        distribution = Distribution(self.context, (size, ), dist={0: 'c'})
        dap = self.context.empty(distribution)

        for val in range(size):
            dap[val] = val
            self.assertEqual(dap[val], val)

        for i in range(1, size + 1):
            dap[-i] = i
            self.assertEqual(dap[-i], i)
Example #12
 def test_2D_bb(self):
     nrows, ncols = 3, 5
     nprocs_per_dim = 2
     cm = Distribution(self.context, (nrows, ncols), ('b', 'b'),
                       (nprocs_per_dim, nprocs_per_dim))
     row_chunks = nrows // nprocs_per_dim + 1
     col_chunks = ncols // nprocs_per_dim + 1
     for r in range(nrows):
         for c in range(ncols):
             rank = (r // row_chunks) * nprocs_per_dim + (c // col_chunks)
             actual = cm.owning_ranks((r, c))
             self.assertSequenceEqual(actual, [rank])
Example #13
    def test_set_and_getitem_cyclic_dist(self):
        size = 10
        distribution = Distribution(self.context, (size,), dist={0: "c"})
        dap = self.context.empty(distribution)

        for val in range(size):
            dap[val] = val
            self.assertEqual(dap[val], val)

        for i in range(1, size + 1):
            dap[-i] = i
            self.assertEqual(dap[-i], i)
Example #14
    def test_set_and_getitem_cyclic_dist(self):
        size = 10
        distribution = Distribution.from_shape(self.dac, (size,),
                                               dist={0: 'c'})
        dap = self.dac.empty(distribution)

        for val in range(size):
            dap[val] = val
            self.assertEqual(dap[val], val)

        for i in range(1, size + 1):
            dap[-i] = i
            self.assertEqual(dap[-i], i)
Example #15
 def test_2D_bb(self):
     nrows, ncols = 3, 5
     nprocs_per_dim = 2
     cm = client_map.Distribution.from_shape(
             self.ctx, (nrows, ncols), ('b', 'b'),
             (nprocs_per_dim, nprocs_per_dim))
     row_chunks = nrows // nprocs_per_dim + 1
     col_chunks = ncols // nprocs_per_dim + 1
     for r in range(nrows):
         for c in range(ncols):
             rank = (r // row_chunks) * nprocs_per_dim + (c // col_chunks)
             actual = cm.owning_ranks((r,c))
             self.assertSequenceEqual(actual, [rank])
Example #16
    def test_set_and_getitem_nd_block_dist(self):
        size = 5
        dap = self.dac.empty((size, size), dist={0: 'b', 1: 'b'})

        for row in range(size):
            for col in range(size):
                val = size*row + col
                dap[row, col] = val

        for row in range(size):
            for col in range(size):
                val = size*row + col
                self.assertEqual(dap[row, col], val)
Example #17
 def test_compare_bcm_cm_local_index(self):
     """Test Block-Cyclic against Cyclic map."""
     start = 1
     size = 16
     grid = 4
     block = 1
     dimdict = dict(start=start, size=size, proc_grid_size=grid,
                    block_size=block, proc_grid_rank=start)
     bcm = maps.map_from_dim_dict(dict(list(dimdict.items()) +
                                           [('dist_type', 'c')]))
     cm = maps.map_from_dim_dict(dict(list(dimdict.items()) +
                                          [('dist_type', 'c')]))
     bcm_lis = [bcm.local_from_global_index(e) for e in range(1, 16, 4)]
     cm_lis = [cm.local_from_global_index(e) for e in range(1, 16, 4)]
     self.assertSequenceEqual(bcm_lis, cm_lis)
Example #18
    def test_set_and_getitem_nd_block_dist(self):
        size = 5
        distribution = Distribution(self.context, (size, size), dist={0: "b", 1: "b"})
        dap = self.context.empty(distribution)

        for row in range(size):
            for col in range(size):
                val = size * row + col
                dap[row, col] = val
                self.assertEqual(dap[row, col], val)

        for row in range(1, size + 1):
            for col in range(1, size + 1):
                dap[-row, -col] = row + col
                self.assertEqual(dap[-row, -col], row + col)
Example #19
    def test_set_and_getitem_block_dist(self):
        size = 10
        distribution = Distribution.from_shape(self.context, (size,),
                                               dist={0: 'b'})
        dap = self.context.empty(distribution)

        for val in range(size):
            dap[val] = val

        for val in range(size):
            self.assertEqual(dap[val], val)

        for i in range(1, size + 1):
            dap[-i] = i
            self.assertEqual(dap[-i], i)
Example #20
    def test_set_and_getitem_nd_block_dist(self):
        size = 5
        distribution = Distribution.from_shape(self.dac, (size, size),
                                               dist={0: 'b', 1: 'b'})
        dap = self.dac.empty(distribution)

        for row in range(size):
            for col in range(size):
                val = size*row + col
                dap[row, col] = val
                self.assertEqual(dap[row, col], val)

        for row in range(1, size + 1):
            for col in range(1, size + 1):
                dap[-row, -col] = row + col
                self.assertEqual(dap[-row, -col], row + col)
Example #21
    def test_not_compatible(self):
        dist_b1 = Distribution(self.context, (10, ), ('b', ), (1, ),
                               targets=[0])
        dist_b2 = Distribution(self.context, (9, ), ('b', ), (1, ),
                               targets=[0])

        self.assertFalse(dist_b1.is_compatible(dist_b2))
        self.assertFalse(dist_b2.is_compatible(dist_b1))

        dist_b3 = Distribution(self.context, (10, ), ('b', ), (2, ),
                               targets=[0, 1])

        self.assertFalse(dist_b1.is_compatible(dist_b3))
        self.assertFalse(dist_b3.is_compatible(dist_b1))

        dist_b4 = Distribution(self.context, (10, ), ('c', ), (2, ),
                               targets=[0, 1])

        self.assertFalse(dist_b4.is_compatible(dist_b3))
        self.assertFalse(dist_b3.is_compatible(dist_b4))

        gdd_unstructured = ({
            'dist_type': 'u',
            'indices': [range(10)],
        }, )
        dist_u = Distribution.from_global_dim_data(self.context,
                                                   gdd_unstructured)

        self.assertFalse(dist_u.is_compatible(dist_b1))
        self.assertFalse(dist_b1.is_compatible(dist_u))
Example #22
def _map_from_axis_dim_dicts(axis_dim_dicts):
    """ Generates a ClientMap instance from a sanitized sequence of
    dimension dictionaries.

    Parameters
    ----------
    axis_dim_dicts: sequence of dictionaries
        Each dictionary is a "dimension dictionary" from the distributed array
        protocol, one per process in this dimension of the process grid.  The
        dimension dictionaries shall all have the same keys and values for
        global attributes: `dist_type`, `size`, `proc_grid_size`, and perhaps
        others.

    Returns
    -------
        An instance of a subclass of MapBase.

    """
    # check that all processes / ranks are accounted for.
    proc_ranks = sorted(dd['proc_grid_rank'] for dd in axis_dim_dicts)
    if proc_ranks != list(range(len(axis_dim_dicts))):
        msg = "Ranks of processes (%r) not consistent."
        raise ValueError(msg % proc_ranks)
    # Sort axis_dim_dicts according to proc_grid_rank.
    axis_dim_dicts = sorted(axis_dim_dicts, key=lambda d: d['proc_grid_rank'])

    dist_type = axis_dim_dicts[0]['dist_type']
    map_class = choose_map(dist_type)
    return map_class.from_axis_dim_dicts(axis_dim_dicts)
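A minimal usage sketch for the function above. The dimension dictionaries are hypothetical but follow the block ('b') per-rank format used elsewhere in these examples; choose_map and the map classes are assumed to be importable from the same module.

# Hypothetical per-rank dictionaries: a size-10 axis split into two blocks.
axis_dim_dicts = [
    {'dist_type': 'b', 'size': 10, 'proc_grid_size': 2, 'proc_grid_rank': 0,
     'start': 0, 'stop': 5},
    {'dist_type': 'b', 'size': 10, 'proc_grid_size': 2, 'proc_grid_rank': 1,
     'start': 5, 'stop': 10},
]
# axis_map = _map_from_axis_dim_dicts(axis_dim_dicts)
# choose_map('b') picks the block map class, which is then constructed from
# the sorted per-rank dictionaries.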
Example #23
def cmap_discretize(cmap, N):
    """Create a discrete colormap from the continuous colormap cmap.

    Parameters
    ----------
    cmap : colormap instance, or string
        The continuous colormap, as object or name, to make discrete.
        For example, matplotlib.cm.jet, or 'jet'.
    N : int
        The number of discrete colors desired.

    Returns
    -------
    colormap
        The desired discrete colormap.

    Example
    -------
    >>> x = resize(arange(100), (5,100))
    >>> djet = cmap_discretize(cm.jet, 5)
    >>> pyplot.imshow(x, cmap=djet)
    """
    # This is copied from:
    # http://wiki.scipy.org/Cookbook/Matplotlib/ColormapTransformations
    if type(cmap) == str:
        cmap = cm.get_cmap(cmap)
    colors_i = concatenate((linspace(0, 1., N), (0., 0., 0., 0.)))
    colors_rgba = cmap(colors_i)
    indices = linspace(0, 1., N + 1)
    cdict = {}
    for ki, key in enumerate(('red', 'green', 'blue')):
        cdict[key] = [(indices[i], colors_rgba[i - 1, ki], colors_rgba[i, ki])
                      for i in range(N + 1)]
    # Return colormap object.
    return colors.LinearSegmentedColormap(cmap.name + "_%d" % N, cdict, 1024)
Example #24
 def test_gh_435_regression_with_var(self):
     dist = Distribution(self.context,
                         shape=(14, ),
                         dist=('b'),
                         targets=range(4))
     darr = self.context.ones(dist)
     darr.var()
Example #25
def _map_from_axis_dim_dicts(axis_dim_dicts):
    """ Generates a ClientMap instance from a sanitized sequence of
    dimension dictionaries.

    Parameters
    ----------
    axis_dim_dicts: sequence of dictionaries
        Each dictionary is a "dimension dictionary" from the distributed array
        protocol, one per process in this dimension of the process grid.  The
        dimension dictionaries shall all have the same keys and values for
        global attributes: `dist_type`, `size`, `proc_grid_size`, and perhaps
        others.

    Returns
    -------
        An instance of a subclass of MapBase.

    """
    # check that all processes / ranks are accounted for.
    proc_ranks = sorted(dd['proc_grid_rank'] for dd in axis_dim_dicts)
    if proc_ranks != list(range(len(axis_dim_dicts))):
        msg = "Ranks of processes (%r) not consistent."
        raise ValueError(msg % proc_ranks)
    # Sort axis_dim_dicts according to proc_grid_rank.
    axis_dim_dicts = sorted(axis_dim_dicts, key=lambda d: d['proc_grid_rank'])

    dist_type = axis_dim_dicts[0]['dist_type']
    map_class = choose_map(dist_type)
    return map_class.from_axis_dim_dicts(axis_dim_dicts)
Example #26
 def test_block_redist_2D_one_to_many(self):
     source_dist = Distribution(self.context, (9, 9), ('b', 'b'), (1, 1),
                                targets=[2])
     dest_dist = Distribution(self.context, (9, 9), ('b', 'b'), (2, 2),
                              targets=range(4))
     plan = source_dist.get_redist_plan(dest_dist)
     expected = [
         {
             'source_rank': 2,
             'dest_rank': 0,
             'indices': [(0, 5, 1), (0, 5, 1)]
         },
         {
             'source_rank': 2,
             'dest_rank': 1,
             'indices': [(0, 5, 1), (5, 9, 1)]
         },
         {
             'source_rank': 2,
             'dest_rank': 2,
             'indices': [(5, 9, 1), (0, 5, 1)]
         },
         {
             'source_rank': 2,
             'dest_rank': 3,
             'indices': [(5, 9, 1), (5, 9, 1)]
         },
     ]
     for p, e in zip(plan, expected):
         self.assertEqual(p, e)
Example #27
    def test_writing_two_datasets(self):
        h5py = import_or_skip('h5py')

        datalen = 33
        dac = Context(self.client)
        da = dac.empty((datalen,), dist={0: 'b'})

        for i in range(datalen):
            da[i] = i

        output_path = temp_filepath('.hdf5')

        try:
            # make a file, and write to dataset 'foo'
            with h5py.File(output_path, 'w') as fp:
                fp['foo'] = np.arange(10)

            # try saving to a different dataset
            dac.save_hdf5(output_path, da, key='bar', mode='a')

            with h5py.File(output_path, 'r') as fp:
                self.assertTrue("foo" in fp)
                self.assertTrue("bar" in fp)

        finally:
            if os.path.exists(output_path):
                os.remove(output_path)
Example #28
 def __init__(self, global_size, grid_size, grid_rank, indices):
     self.global_size = global_size
     self.grid_size = grid_size
     self.grid_rank = grid_rank
     self.indices = np.asarray(indices)
     self.local_size = len(self.indices)
     local_indices = range(self.local_size)
     self._local_index = dict(zip(self.indices, local_indices))
Example #29
 def test_no_empty_local_arrays_3_targets(self):
     for n in range(1, 20):
         dist = Distribution(self.context,
                             shape=(n, ),
                             dist=('b', ),
                             targets=self.context.targets[:3])
         for ls in dist.localshapes():
             self.assertNotIn(0, ls)
Example #30
 def __init__(self, global_size, grid_size, grid_rank, indices):
     self.global_size = global_size
     self.grid_size = grid_size
     self.grid_rank = grid_rank
     self.indices = np.asarray(indices)
     self.local_size = len(self.indices)
     local_indices = range(self.local_size)
     self._local_index = dict(zip(self.indices, local_indices))
Example #31
 def test_2D_bn(self):
     nrows, ncols = 31, 53
     cm = Distribution(self.context, (nrows, ncols), {0: 'b'}, (4, 1))
     chunksize = (nrows // 4) + 1
     for _ in range(100):
         r, c = randrange(nrows), randrange(ncols)
         rank = r // chunksize
         self.assertSequenceEqual(cm.owning_ranks((r, c)), [rank])
Example #32
    def get_redist_plan(self, other_dist):
        # Get all targets
        all_targets = sorted(set(self.targets + other_dist.targets))
        union_rank_from_target = {t: r for (r, t) in enumerate(all_targets)}

        source_ranks = range(len(self.targets))
        source_targets = self.targets
        union_rank_from_source_rank = {
            sr: union_rank_from_target[st]
            for (sr, st) in zip(source_ranks, source_targets)
        }

        dest_ranks = range(len(other_dist.targets))
        dest_targets = other_dist.targets
        union_rank_from_dest_rank = {
            sr: union_rank_from_target[st]
            for (sr, st) in zip(dest_ranks, dest_targets)
        }

        source_ddpr = self.get_dim_data_per_rank()
        dest_ddpr = other_dist.get_dim_data_per_rank()
        source_dest_pairs = product(source_ddpr, dest_ddpr)

        if self.shape == other_dist.shape:
            _intersection = Distribution._redist_intersection_same_shape
        else:
            _intersection = Distribution._redist_intersection_reshape

        plan = []
        for source_dd, dest_dd in source_dest_pairs:
            intersections = _intersection(source_dd, dest_dd)
            if intersections and all(i for i in intersections):
                source_coords = tuple(dd['proc_grid_rank'] for dd in source_dd)
                source_rank = self.rank_from_coords[source_coords]
                dest_coords = tuple(dd['proc_grid_rank'] for dd in dest_dd)
                dest_rank = other_dist.rank_from_coords[dest_coords]
                plan.append({
                    'source_rank':
                    union_rank_from_source_rank[source_rank],
                    'dest_rank':
                    union_rank_from_dest_rank[dest_rank],
                    'indices':
                    intersections,
                })

        return plan
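Each entry of the returned plan is a plain dict. For orientation, the first entry from the one-to-many redistribution in Example #26 above would look like this (a hand-worked illustration based on that test's expected values, not captured output):

plan_entry = {
    'source_rank': 2,            # union rank holding the data
    'dest_rank': 0,              # union rank that should receive it
    'indices': [(0, 5, 1), (0, 5, 1)],  # (start, stop, step) per dimension
}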
Example #33
    def test_save_3d(self):
        shape = (4, 5, 3)
        source = np.random.random(shape)

        dist = {0: 'b', 1: 'c', 2: 'n'}
        distribution = Distribution.from_shape(self.dac, shape, dist=dist)
        da = self.dac.empty(distribution)

        for i in range(shape[0]):
            for j in range(shape[1]):
                for k in range(shape[2]):
                    da[i, j, k] = source[i, j, k]

        self.dac.save_hdf5(self.output_path, da, mode='w')
        with self.h5py.File(self.output_path, 'r') as fp:
            self.assertTrue("buffer" in fp)
            assert_allclose(source, fp["buffer"])
Example #34
 def __init__(self, size, grid_size, indices=None):
     self.size = size
     self.grid_size = grid_size
     self.indices = indices
     if self.indices is not None:
         # Convert to NumPy arrays if not already.
         self.indices = [np.asarray(ind) for ind in self.indices]
     self._index_owners = range(self.grid_size)
Example #35
 def get_dimdicts(self):
     return tuple(({'dist_type': 'c',
                     'size': self.size,
                     'proc_grid_size': self.grid_size,
                     'proc_grid_rank': grid_rank,
                     'start': grid_rank * self.block_size,
                     'block_size': self.block_size,
                     }) for grid_rank in range(self.grid_size))
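Hand-worked output of the method above, assuming size=8, grid_size=2, and block_size=2; each rank's 'start' is its grid rank times the block size:

# get_dimdicts() would return:
# ({'dist_type': 'c', 'size': 8, 'proc_grid_size': 2, 'proc_grid_rank': 0,
#   'start': 0, 'block_size': 2},
#  {'dist_type': 'c', 'size': 8, 'proc_grid_size': 2, 'proc_grid_rank': 1,
#   'start': 2, 'block_size': 2})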
Example #36
 def __init__(self, size, grid_size, indices=None):
     self.size = size
     self.grid_size = grid_size
     self.indices = indices
     if self.indices is not None:
         # Convert to NumPy arrays if not already.
         self.indices = [np.asarray(ind) for ind in self.indices]
     self._owners = range(self.grid_size)
Example #37
 def test_2D_bn(self):
     nrows, ncols = 31, 53
     cm = Distribution(self.context, (nrows, ncols), {0: "b"}, (4, 1))
     chunksize = (nrows // 4) + 1
     for _ in range(100):
         r, c = randrange(nrows), randrange(ncols)
         rank = r // chunksize
         self.assertSequenceEqual(cm.owning_ranks((r, c)), [rank])
Example #38
 def test_irregular_block_assignment(self):
     global_shape = (5, 9)
     global_dim_data = (
             {
                 'dist_type': 'b',
                 'bounds': (0, 5),
             },
             {
                 'dist_type': 'b',
                 'bounds': (0, 2, 6, 7, 9),
             }
         )
     distribution = Distribution(self.context, global_dim_data)
     distarr = DistArray(distribution, dtype=int)
     for i in range(global_shape[0]):
         for j in range(global_shape[1]):
             distarr[i, j] = i + j
Example #39
 def test_2D_bn(self):
     nrows, ncols = 31, 53
     cm = client_map.Distribution.from_shape(self.ctx, (nrows, ncols),
                                             {0: 'b'}, (4, 1))
     chunksize = (nrows // 4) + 1
     for _ in range(100):
         r, c = randrange(nrows), randrange(ncols)
         rank = r // chunksize
         self.assertSequenceEqual(cm.owning_ranks((r,c)), [rank])
Example #40
 def test_from_global_dim_data_uu(self):
     rows = 6
     cols = 20
     row_ixs = numpy.random.permutation(range(rows))
     col_ixs = numpy.random.permutation(range(cols))
     row_indices = [row_ixs[:rows//2], row_ixs[rows//2:]]
     col_indices = [col_ixs[:cols//4], col_ixs[cols//4:]]
     glb_dim_data = (
             {'dist_type': 'u',
                 'indices': row_indices},
             {'dist_type': 'u',
                 'indices' : col_indices},
             )
     distribution = Distribution(self.context, glb_dim_data)
     distarr = DistArray(distribution, dtype=int)
     for i in range(rows):
         for j in range(cols):
             distarr[i, j] = i*cols + j
Example #41
    def test_compare_bcm_bm_local_index(self):
        """Test Block-Cyclic against Block map."""
        start = 4
        size = 16
        grid = 4
        block = size // grid
        dimdict = dict(start=start, size=size, proc_grid_size=grid)

        bcm = maps.map_from_dim_dict(dict(list(dimdict.items()) +
                                              [('dist_type', 'c'),
                                               ('block_size', block)]))
        bm = maps.map_from_dim_dict(dict(list(dimdict.items()) +
                                             [('dist_type', 'b'),
                                              ('stop', size // grid +
                                                       start)]))
        bcm_lis = [bcm.local_from_global_index(e) for e in range(4, 8)]
        bm_lis = [bm.local_from_global_index(e) for e in range(4, 8)]
        self.assertSequenceEqual(bcm_lis, bm_lis)
Example #42
 def get_dimdicts(self):
     return tuple(({
         'dist_type': 'c',
         'size': self.size,
         'proc_grid_size': self.grid_size,
         'proc_grid_rank': grid_rank,
         'start': grid_rank * self.block_size,
         'block_size': self.block_size,
     }) for grid_rank in range(self.grid_size))
Example #43
 def test_compare_bcm_cm_local_index(self):
     """Test Block-Cyclic against Cyclic map."""
     start = 1
     size = 16
     grid = 4
     block = 1
     dimdict = dict(start=start,
                    size=size,
                    proc_grid_size=grid,
                    block_size=block,
                    proc_grid_rank=start)
     bcm = maps.map_from_dim_dict(
         dict(list(dimdict.items()) + [('dist_type', 'c')]))
     cm = maps.map_from_dim_dict(
         dict(list(dimdict.items()) + [('dist_type', 'c')]))
     bcm_lis = [bcm.local_from_global_index(e) for e in range(1, 16, 4)]
     cm_lis = [cm.local_from_global_index(e) for e in range(1, 16, 4)]
     self.assertSequenceEqual(bcm_lis, cm_lis)
Example #44
    def test_set_and_getitem_nd_block_dist(self):
        size = 5
        distribution = Distribution(self.context, (size, size),
                                    dist={
                                        0: 'b',
                                        1: 'b'
                                    })
        dap = self.context.empty(distribution)

        for row in range(size):
            for col in range(size):
                val = size * row + col
                dap[row, col] = val
                self.assertEqual(dap[row, col], val)

        for row in range(1, size + 1):
            for col in range(1, size + 1):
                dap[-row, -col] = row + col
                self.assertEqual(dap[-row, -col], row + col)
Example #45
    def test_compare_bcm_bm_local_index(self):
        """Test Block-Cyclic against Block map."""
        start = 4
        size = 16
        grid = 4
        block = size // grid
        dimdict = dict(start=start, size=size, proc_grid_size=grid)

        bcm = maps.map_from_dim_dict(
            dict(
                list(dimdict.items()) +
                [('dist_type', 'c'), ('block_size', block)]))
        bm = maps.map_from_dim_dict(
            dict(
                list(dimdict.items()) +
                [('dist_type', 'b'), ('stop', size // grid + start)]))
        bcm_lis = [bcm.local_from_global_index(e) for e in range(4, 8)]
        bm_lis = [bm.local_from_global_index(e) for e in range(4, 8)]
        self.assertSequenceEqual(bcm_lis, bm_lis)
Example #46
def cyclic(dd):
    """Return the global indices owned by this (block-)cyclically-distributed
    process.

    Requires 'start', 'size', 'proc_grid_size', and (optionally) 'block_size'
    keys.  If 'block_size' key does not exist, it is set to 1.
    """
    dd.setdefault("block_size", 1)
    nblocks = int(ceil(dd["size"] / dd["block_size"]))
    block_indices = range(0, nblocks, dd["proc_grid_size"])

    global_indices = []
    for block_index in block_indices:
        block_start = block_index * dd["block_size"] + dd["start"]
        block_stop = block_start + dd["block_size"]
        block = range(block_start, min(block_stop, dd["size"]))
        global_indices.extend(block)

    return global_indices
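A small worked call, assuming the function above is in scope; the result is computed by hand from the code, not from a captured run:

# Rank 1 of a 2-process cyclic distribution over 10 elements: with the
# default block_size of 1, this process starts at global index 1 and owns
# every second element after that.
dd = {'start': 1, 'size': 10, 'proc_grid_size': 2}
print(cyclic(dd))  # [1, 3, 5, 7, 9]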
Example #47
    def test_redist_2D(self):
        nrows, ncols = 7, 13
        source_dist = Distribution(self.context, (nrows, ncols), ('b', 'b'),
                                   (2, 2),
                                   targets=range(4))
        dest_gdd = ({
            'dist_type': 'b',
            'bounds': [0, nrows // 3, nrows],
        }, {
            'dist_type': 'b',
            'bounds': [0, ncols // 3, ncols],
        })
        dest_dist = Distribution.from_global_dim_data(self.context,
                                                      dest_gdd,
                                                      targets=range(4))
        source_da = self.context.empty(source_dist, dtype=numpy.int32)
        source_da.fill(-42)

        dest_da = source_da.distribute_as(dest_dist)
        assert_array_equal(source_da.tondarray(), dest_da.tondarray())
Example #48
def create_discrete_colormaps(num_values):
    """ Create colormap objects for a discrete colormap.

    Parameters
    ----------
    num_values : int
        The number of distinct colors to use.

    Returns
    -------
    cmap, norm, text_colors : tuple
        The matplotlib colormap, norm, and recommended text colors.
        text_colors is an array of length num_values,
        with each entry being a nice color for text drawn
        on top of the colormap selection.
    """
    # Create discrete colormap for matplotlib.
    cmap = cmap_discretize(cm.jet, num_values)
    bounds = range(num_values + 1)
    norm = colors.BoundaryNorm(bounds, cmap.N)

    # Choose a text color for each discrete color.
    # The idea is to pick black for colors near white.
    # This is not sophisticated but ok for this use.
    text_colors = []
    for j in range(num_values):
        # Get rgb color that matshow() will use.
        jj = float(j + 0.5) / float(num_values)
        cj = cmap(jj)
        # Get average of rgb values.
        avg = (cj[0] + cj[1] + cj[2]) / 3.0
        # With 4-color jet, avg cyan=0.6111, yellow=0.6337.
        # Choose empirically reasonable cutoff.
        if avg >= 0.5:
            text_color = 'black'
        else:
            text_color = 'white'
        text_colors.append(text_color)

    # Return a tuple with all the parts.
    colormaps = (cmap, norm, text_colors)
    return colormaps
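A minimal usage sketch, assuming matplotlib is installed and the helpers above (cmap_discretize, cm, colors) are importable; ax and data are hypothetical:

# Four discrete colors plus a matching norm and per-color text colors.
cmap, norm, text_colors = create_discrete_colormaps(4)
# ax.matshow(data, cmap=cmap, norm=norm)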
Example #49
 def test_from_global_dim_data_uu(self):
     rows = 6
     cols = 20
     row_ixs = numpy.random.permutation(range(rows))
     col_ixs = numpy.random.permutation(range(cols))
     row_indices = [row_ixs[:rows // 2], row_ixs[rows // 2:]]
     col_indices = [col_ixs[:cols // 4], col_ixs[cols // 4:]]
     glb_dim_data = (
         {
             'dist_type': 'u',
             'indices': row_indices
         },
         {
             'dist_type': 'u',
             'indices': col_indices
         },
     )
     distribution = Distribution.from_global_dim_data(
         self.context, glb_dim_data)
     distarr = DistArray(distribution, dtype=int)
     distarr.toarray()
Example #50
 def test_sum_4D_cyclic(self):
     shape = (10, 20, 30, 40)
     arr = numpy.zeros(shape)
     arr.fill(3)
     dist = Distribution(self.context,
                         shape=shape,
                         dist=('c', 'c', 'c', 'c'))
     darr = self.context.empty(shape_or_dist=dist)
     darr.fill(3)
     for axis in range(4):
         arr_sum = arr.sum(axis=axis)
         darr_sum = darr.sum(axis=axis)
         assert_allclose(darr_sum.tondarray(), arr_sum)
     assert_allclose(darr.sum().tondarray(), arr.sum())
Example #51
 def __init__(self,
              size,
              grid_size,
              bounds=None,
              comm_padding=None,
              boundary_padding=None):
     self.size = size
     self.grid_size = grid_size
     if bounds is None:
         self.bounds = [
             _start_stop_block(size, grid_size, grid_rank)
             for grid_rank in range(grid_size)
         ]
     else:
         self.bounds = bounds
     self.comm_padding = comm_padding or 0
     self.boundary_padding = boundary_padding or 0
Example #52
 def test_from_global_dim_data_1d(self):
     total_size = 40
     list_of_indices = [
         [29, 38, 18, 19, 11, 33, 10, 1, 22, 25],
         [5, 15, 34, 12, 16, 24, 23, 39, 6, 36],
         [0, 7, 27, 4, 32, 37, 21, 26, 9, 17],
         [35, 14, 20, 13, 3, 30, 2, 8, 28, 31],
     ]
     glb_dim_data = ({
         'dist_type': 'u',
         'indices': list_of_indices,
     }, )
     distribution = Distribution.from_global_dim_data(
         self.context, glb_dim_data)
     distarr = DistArray(distribution, dtype=int)
     for i in range(total_size):
         distarr[i] = i
     localarrays = distarr.get_localarrays()
     for i, arr in enumerate(localarrays):
         assert_allclose(arr, list_of_indices[i])
Example #53
 def get_dimdicts(self):
     bounds = self.bounds or [[0, 0]]
     grid_ranks = range(len(bounds))
     cpadding = self.comm_padding
     padding = [[cpadding, cpadding] for _ in grid_ranks]
     if len(padding) > 0:
         padding[0][0] = self.boundary_padding
         padding[-1][-1] = self.boundary_padding
     data_tuples = zip(grid_ranks, padding, bounds)
     # Build the result
     out = []
     for grid_rank, padding, (start, stop) in data_tuples:
         out.append({
             'dist_type': 'b',
             'size': self.size,
             'proc_grid_size': self.grid_size,
             'proc_grid_rank': grid_rank,
             'start': start,
             'stop': stop,
             'padding': padding,
         })
     return tuple(out)
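Hand-worked output of the method above, assuming a 9-element axis over two processes with bounds [(0, 5), (5, 9)] and zero comm/boundary padding:

# get_dimdicts() would return:
# ({'dist_type': 'b', 'size': 9, 'proc_grid_size': 2, 'proc_grid_rank': 0,
#   'start': 0, 'stop': 5, 'padding': [0, 0]},
#  {'dist_type': 'b', 'size': 9, 'proc_grid_size': 2, 'proc_grid_rank': 1,
#   'start': 5, 'stop': 9, 'padding': [0, 0]})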
Example #54
def global_flat_indices(dim_data):
    """
    Return a list of tuples of indices into the flattened global array.

    Parameters
    ----------
    dim_data : sequence of dimension dictionaries
        One dimension dictionary per dimension of the global array.

    Returns
    -------
    list of 2-tuples of ints.
        Each tuple is a (start, stop) interval into the flattened global array.
        All selected ranges comprise the indices for this dim_data's sub-array.

    """
    # TODO: FIXME: can be optimized when the last dimension is 'n'.

    for dd in dim_data:
        if dd['dist_type'] == 'n':
            dd['start'] = 0
            dd['stop'] = dd['size']

    glb_shape = tuple(dd['size'] for dd in dim_data)
    glb_strides = strides_from_shape(glb_shape)

    ranges = [range(dd['start'], dd['stop']) for dd in dim_data[:-1]]
    start_ranges = ranges + [[dim_data[-1]['start']]]
    stop_ranges = ranges + [[dim_data[-1]['stop']]]

    def flatten(idx):
        return sum(a * b for (a, b) in zip(idx, glb_strides))

    starts = map(flatten, product(*start_ranges))
    stops = map(flatten, product(*stop_ranges))

    intervals = zip(starts, stops)
    return condense(intervals)
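A hand-worked example of what the function returns, assuming strides_from_shape gives row-major strides and condense merges adjacent intervals:

# A 2 x 2 block of a 4 x 5 global array (rows 0:2, columns 1:3):
# dim_data = ({'dist_type': 'b', 'size': 4, 'start': 0, 'stop': 2},
#             {'dist_type': 'b', 'size': 5, 'start': 1, 'stop': 3})
# Row-major strides are (5, 1): row 0 contributes flat interval (1, 3) and
# row 1 contributes (6, 8).  They are not adjacent, so
# global_flat_indices(dim_data) -> [(1, 3), (6, 8)]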
Example #55
def cmap_discretize(cmap, N):
    """Create a discrete colormap from the continuous colormap cmap.

    Parameters
    ----------
    cmap : colormap instance, or string
        The continuous colormap, as object or name, to make discrete.
        For example, matplotlib.cm.jet, or 'jet'.
    N : int
        The number of discrete colors desired.

    Returns
    -------
    colormap
        The desired discrete colormap.

    Example
    -------
    >>> x = resize(arange(100), (5,100))
    >>> djet = cmap_discretize(cm.jet, 5)
    >>> pyplot.imshow(x, cmap=djet)
    """
    # This is copied from:
    # http://wiki.scipy.org/Cookbook/Matplotlib/ColormapTransformations
    if type(cmap) == str:
        cmap = cm.get_cmap(cmap)
    colors_i = concatenate((linspace(0, 1., N), (0., 0., 0., 0.)))
    colors_rgba = cmap(colors_i)
    indices = linspace(0, 1., N + 1)
    cdict = {}
    for ki, key in enumerate(('red', 'green', 'blue')):
        cdict[key] = [
            (indices[i], colors_rgba[i - 1, ki], colors_rgba[i, ki])
            for i in range(N + 1)
        ]
    # Return colormap object.
    return colors.LinearSegmentedColormap(cmap.name + "_%d" % N, cdict, 1024)
Example #56
    def test_from_global_dim_data_bu(self):

        rows = 9
        row_break_point = rows // 2
        cols = 10
        col_indices = numpy.random.permutation(range(cols))
        col_break_point = len(col_indices) // 3
        indices = [
            col_indices[:col_break_point], col_indices[col_break_point:]
        ]
        glb_dim_data = (
            {
                'dist_type': 'b',
                'bounds': (0, row_break_point, rows)
            },
            {
                'dist_type': 'u',
                'indices': indices
            },
        )
        distribution = Distribution.from_global_dim_data(
            self.context, glb_dim_data)
        distarr = DistArray(distribution, dtype=int)
        distarr.toarray()
Example #57
 def test_global_from_local_index(self):
     lis = range(4)
     gis = [self.m.global_from_local_index(li) for li in lis]
     expected = (2, 6, 10, 14)
     self.assertSequenceEqual(gis, expected)
Example #58
 def test_local_from_global_index(self):
     gis = (2, 6, 10, 14)
     lis = [self.m.local_from_global_index(gi) for gi in gis]
     expected = tuple(range(4))
     self.assertSequenceEqual(lis, expected)
Example #59
 def test_global_from_local_index(self):
     lis = range(23)
     gis = [self.m.global_from_local_index(li) for li in lis]
     expected = list(range(16, 39))
     self.assertSequenceEqual(gis, expected)