Example #1
0
 def test_connection(self):
     """Connecting and disconnecting a Cluster toggles its `connected` flag."""
     cluster = dn.Cluster(self.CONFIG)
     self.assertIsNotNone(cluster)
     # Freshly constructed cluster starts disconnected.
     self.assertFalse(cluster.connected)
     cluster.connect()
     self.assertTrue(cluster.connected)
     cluster.disconnect()
     self.assertFalse(cluster.connected)
Example #2
0
    def setUp(self):
        """Route test logging to stdout and build the Cluster under test."""
        self.handler = logging.StreamHandler(sys.stdout)
        log.addHandler(self.handler)
        log.info('ClusterTest: {}, {}, {}'
                 .format(self.BACKEND, self.ENGINE, self.CONFIG))

        # Select engine/backend before constructing the cluster.
        dn.use(backend=self.BACKEND, engine=self.ENGINE)
        self.C = dn.Cluster(self.CONFIG)
Example #3
0
    def setUp(self):
        """Prepare logging, select engine/backend, and ensure the test pool exists."""
        self.handler = logging.StreamHandler(sys.stdout)
        log.addHandler(self.handler)
        log.info('PoolTest: {}, {}, {}'.format(self.BACKEND, self.ENGINE,
                                               self.CONFIG))

        dn.use(backend=self.BACKEND, engine=self.ENGINE)
        self.C = dn.Cluster(self.CONFIG)
        if self.BACKEND != 'ceph':
            # 'ram' and 'hdf5' backends need the pool created up front;
            # ceph pools are assumed to pre-exist.
            if not self.C.has_pool(self.POOL):
                self.C.create_pool(self.POOL)
        self.pool = self.C[self.POOL]
        self.fakeds = 'NotADataset'
Example #4
0
    # NOTE(review): this chunk starts mid-script — the opening branch
    # (presumably a root-rank `if` with a tempfile handle `f`) is above
    # the visible region; confirm against the full file.
    # Root rank writes a random float32 cube to a temporary HDF5 file.
    data = np.random.rand(DS, DS, DS).astype(np.float32)
    with h5.File(f.name, 'w') as g:
        g.create_dataset('data', data=data)
    fname = f.name
else:
    # Non-root ranks learn the filename from the broadcast below.
    fname = None
fname = mpi_comm().bcast(fname, root=0)

# Every rank opens the same HDF5 file read-only as the reference data.
h5in = h5.File(fname, 'r')
h5data = h5in['data']

###############################################################################

with MpiTimer('DosNa %s (%s)' % (engine.name, backend.name)):

    with dn.Cluster('/tmp/') as C:

        with C.create_pool('test_dosna') as P:
            # Mirror the HDF5 reference data into a chunked DosNa dataset.
            data = P.create_dataset('test_data',
                                    data=h5data,
                                    chunks=(CS, CS, CS))

            # Each rank validates a disjoint stripe of chunks: rank r
            # checks chunks r, r + size, r + 2*size, ...
            for start in range(mpi_rank() * CS, DS, mpi_size() * CS):
                stop = start + CS
                i = start
                j = min(stop, DS)
                np.testing.assert_allclose(data[i:j], h5data[i:j])

            # NOTE(review): this second loop is cut off at the end of the
            # visible chunk; its body continues past this region.
            for start in range(mpi_rank() * CS, DS, mpi_size() * CS):
                stop = start + CS
                i = start
Example #5
0

import sys
import os.path as op

import dosna as dn
dn.use(engine='cpu', backend='ceph')

parent_folder = op.realpath(op.join(op.dirname(__file__), '..'))

if __name__ == '__main__':
    # Usage:
    #   script POOL [POOL ...]   -> uses <repo>/ceph.conf
    #   script POOL CONFFILE     -> uses the given config file
    #
    # Bail out on missing arguments FIRST: the original checked argc only
    # after indexing sys.argv[2], so running with no arguments raised an
    # IndexError instead of exiting cleanly.
    if len(sys.argv) <= 1:
        sys.exit(1)

    if len(sys.argv) == 2:
        # No explicit config given: fall back to the repo-level ceph.conf.
        conffile = op.join(parent_folder, 'ceph.conf')
        pools = sys.argv[1:]
    else:
        conffile = sys.argv[2]
        # Exclude the conffile argument from the pool list; the original
        # iterated sys.argv[1:], which attempted to delete a "pool" named
        # after the config-file path.
        pools = [sys.argv[1]]

    # Connect with a short timeout and drop each requested pool.
    with dn.Cluster('Dummy', conffile=conffile, timeout=5) as C:
        for pool in pools:
            print('Deleting pool', pool)
            C.del_pool(pool)
Example #6
0
    # NOTE(review): chunk starts mid-function — the enclosing definition
    # is above the visible region. `params` forwards an optional conffile
    # (argv[3]) to dn.Cluster.
    params = dict(conffile=sys.argv[3]) if len(sys.argv) > 3 else {}

    np.set_printoptions(formatter={'float': '{: 0.3f}'.format})

    # Root rank creates the random cube; every rank gets a copy via bcast.
    if mpi_root():
        rdata = np.random.rand(DS, DS, DS).astype(np.float32)
    else:
        rdata = None
    rdata = mpi_comm().bcast(rdata, root=0)

    with MpiTimer('DosNa (engine: %s, backend: %s)' %
                  (engine.name, backend.name)):

        pprint("Connecting", rank=0)
        with dn.Cluster('Cluster', **params) as C:
            pprint("Connected", C.connected)

            with C.create_pool('test_dosna') as P:
                pprint("Pool Created", rank=0)
                with MpiTimer('Dataset Created'):
                    # Store the broadcast array as a chunked DosNa dataset.
                    data = P.create_dataset('test_data',
                                            data=rdata,
                                            chunks=(CS, CS, CS))
                # Only root verifies: sliced reads from the DosNa dataset
                # must match the in-memory reference array.
                if mpi_root():
                    pprint('Asserting the quality')
                    for i, j in [(0, 30), (10, 50), (5, 25), (37, 91)]:
                        np.testing.assert_allclose(data[i:j, i:j, i:j],
                                                   rdata[i:j, i:j, i:j])

                    print(data[25, 25, 25:35])
Example #7
0
    # NOTE(review): these lines are the tail of a function whose `def` is
    # above the visible chunk; `ds3_` and the timer `T` are defined there.
    ds3_.delete()

    return T.time


###############################################################################
# Start tests!

hout, dout = get_output_dataset()

# Sweep every (data size, chunk size) combination.
for i, DS in enumerate(DATA_SIZE):

    f, data = create_random_dataset(DS)

    for j, CS in enumerate(CHUNK_SIZE):
        with dn.Cluster(CLUSTERCFG) as C:
            # 'ram'/'hdf5' backends need the pool created explicitly.
            if backend.name in ['ram', 'hdf5'] and not C.has_pool(POOL):
                C.create_pool(POOL)
            with C[POOL] as P:
                pprint('Loading Data -- shape: {} chunks: {}'
                       .format(DS, CS))
                with MpiTimer('Data loaded') as t:
                    ds = P.create_dataset('data', data=data, chunks=(CS, CS, CS))

                # Run each convolution NTESTS times; root records timings
                # (load time, convolve1 time, ...) into the output array.
                for k in range(NTESTS):
                    t1 = convolve1(ds, SIGMA)
                    t2 = convolve2(ds, SIGMA)

                    if mpi_root():
                        dout[i, j, 0, k] = t.time
                        dout[i, j, 1, k] = t1
                        # NOTE(review): chunk ends mid-loop; remaining
                        # assignments (e.g. for t2) continue past this view.