Example #1
    def _transport_initialise(self, options):
        MPI_setup(options)

        backend = options.get("dosna_backend") or DEFAULT_BACKEND
        engine = options.get("dosna_engine") or DEFAULT_ENGINE

        dosna_connection = options.get("dosna_connection") \
            or DEFAULT_CONNECTION
        dosna_options = {}

        if backend == "ceph":
            dosna_options["conffile"] = options.get("dosna_ceph_conffile") \
                or DEFAULT_CEPH_CONFFILE
            dosna_options["client_id"] = options.get("dosna_ceph_client_id") \
                or DEFAULT_CEPH_CLIENT_ID
        elif backend == "hdf5":
            dosna_options["directory"] = options.get("dosna_hdf5_dir") \
                or DEFAULT_HDF5_DIR
        log.debug("DosNa is using backend %s engine %s and options %s",
                  backend, engine, dosna_options)
        dn.use(engine, backend)
        self.dosna_connection = dn.Connection(dosna_connection,
                                              **dosna_options)
        self.dosna_connection.connect()
        # initially reading from an HDF5 file, so Hdf5TransportData will be
        # used for all datasets created in a loader
        options['transport'] = 'hdf5'
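
For reference, a hypothetical options dict that would drive the ceph branch of this method might look like the sketch below; the key names are the ones read above, while every value is only a placeholder.

# Illustrative options only; any key that is missing falls back to the
# corresponding DEFAULT_* constant in the method above.
options = {
    "dosna_backend": "ceph",
    "dosna_engine": "mpi",
    "dosna_connection": "my-pool",
    "dosna_ceph_conffile": "/etc/ceph/ceph.conf",
    "dosna_ceph_client_id": "admin",
}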
Example #2
    def test_connection(self):
        connection_handle = dn.Connection(**self.CONNECTION_CONFIG)
        self.assertIsNotNone(connection_handle)
        self.assertFalse(connection_handle.connected)
        connection_handle.connect()
        self.assertTrue(connection_handle.connected)
        connection_handle.disconnect()
        self.assertFalse(connection_handle.connected)
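
The same connect/disconnect lifecycle can also be expressed with the context-manager form that Example #6 uses; a minimal sketch, with a hypothetical backend/engine selection and connection name:

import dosna as dn

# Hypothetical engine/backend choice and connection name; the real values
# are backend-specific, as in the surrounding tests.
dn.use(backend="ram", engine="cpu")

with dn.Connection("test-connection") as connection_handle:
    # Entering the with-block connects and leaving it disconnects, mirroring
    # the explicit connect()/disconnect() calls asserted in the test above.
    assert connection_handle.connected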
Example #3
    def setUp(self):
        if ENGINE == 'mpi' and mpi_size() > 1:
            self.skipTest("This should not test concurrent access")

        log.info('DatasetTest: %s, %s, %s', BACKEND, ENGINE, CONNECTION_CONFIG)

        dn.use(backend=BACKEND, engine=ENGINE)
        self.connection_handle = dn.Connection(**CONNECTION_CONFIG)
        self.connection_handle.connect()
        self.fake_dataset = 'NotADataset'
        self.data = np.random.random_integers(DATASET_NUMBER_RANGE[0],
                                              DATASET_NUMBER_RANGE[1],
                                              DATA_SIZE)
        self.dataset = self.connection_handle.create_dataset(
            self.fake_dataset, data=self.data, chunk_size=DATA_CHUNK_SIZE)
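
A test built on this fixture would typically read the dataset back and compare it against the NumPy source array. The method below is hypothetical and assumes DosNa datasets support h5py-style slicing:

    def test_dataset_matches_source(self):
        # Hypothetical test, not part of the original suite: reads the whole
        # dataset back and checks it against the array used to create it.
        np.testing.assert_array_equal(self.dataset[...], self.data)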
Example #4
    def setUp(self):
        if ENGINE != "mpi" or mpi_size() < 2:
            self.skipTest("Test for engine mpi with several processes")

        if BACKEND == "ram":
            self.skipTest("Concurrent access in backend ram is not supported")

        log.info('DatasetTest: %s, %s, %s', BACKEND, ENGINE, CONNECTION_CONFIG)

        dn.use(backend=BACKEND, engine=ENGINE)
        self.connection_handle = dn.Connection(**CONNECTION_CONFIG)
        self.connection_handle.connect()
        self.fake_dataset = 'NotADataset'
        data = None
        if mpi_is_root():
            data = np.random.random_integers(DATASET_NUMBER_RANGE[0],
                                             DATASET_NUMBER_RANGE[1],
                                             DATA_SIZE)
        self.data = mpi_comm().bcast(data, root=0)
        self.dataset = self.connection_handle.create_dataset(
            self.fake_dataset, data=self.data, chunk_size=DATA_CHUNK_SIZE)
Example #5
    def _transport_initialise(self, options):
        MPI_setup(options)

        backend = options.get("dosna_backend") or DEFAULT_BACKEND
        engine = options.get("dosna_engine") or DEFAULT_ENGINE
        dosna_connection_name = options.get("dosna_connection") \
            or DEFAULT_CONNECTION
        dosna_connection_options = options.get("dosna_connection_options")

        dosna_options = {}

        dosna_options.update(
            dict(item.split('=') for item in dosna_connection_options))
        log.debug("DosNa is using backend %s engine %s and options %s",
                  backend, engine, dosna_options)
        dn.use(engine, backend)
        self.dosna_connection = dn.Connection(dosna_connection_name,
                                              **dosna_options)
        self.dosna_connection.connect()
        # initially reading from an HDF5 file, so Hdf5TransportData will be
        # used for all datasets created in a loader
        options['transport'] = 'hdf5'
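
The dict(item.split('=') ...) expression above assumes dosna_connection_options is an iterable of "key=value" strings (for example collected from a command-line flag); in isolation the parsing looks like this sketch, with placeholder values:

# Sketch of the option parsing used above; the values are placeholders.
dosna_connection_options = ["conffile=/etc/ceph/ceph.conf", "client_id=admin"]
dosna_options = dict(item.split('=') for item in dosna_connection_options)
# dosna_options == {'conffile': '/etc/ceph/ceph.conf', 'client_id': 'admin'}

Note that a value containing '=' would make split('=') yield more than two fields and break the dict() call; split('=', 1) would be the safer variant.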
Example #6
    ds3_.delete()

    return T.time


###############################################################################
# Start tests!
if mpi_root():
    hout, dout = get_output_dataset()

for i, DS in enumerate(DATA_SIZE):

    f, data = create_random_dataset(DS)

    for j, CS in enumerate(CHUNK_SIZE):
        with dn.Connection(**CONNECTION_CONFIG) as connection:
            pprint('Loading Data -- shape: {} chunk_size: {}'.format(DS, CS))
            with MpiTimer('Data loaded') as t:
                dataset = connection.create_dataset('data',
                                                    data=data,
                                                    chunk_size=(CS, CS, CS))

            for k in range(NTESTS):
                t1 = convolve1(dataset, SIGMA)
                t2 = convolve2(dataset, SIGMA)

                if mpi_root():
                    dout[i, j, 0, k] = t.time
                    dout[i, j, 1, k] = t1
                    dout[i, j, 2, k] = t2
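
MpiTimer above is used as a context manager whose .time attribute holds the elapsed time once the block exits; a minimal single-process stand-in for that pattern (hypothetical, and without the MPI synchronisation the real helper presumably performs) could be:

import time

class SimpleTimer(object):
    """Minimal, single-process stand-in for the MpiTimer pattern above."""

    def __init__(self, name):
        self.name = name
        self.time = None

    def __enter__(self):
        self._start = time.time()
        return self

    def __exit__(self, *exc_info):
        # Record the elapsed wall-clock time in .time, the attribute the
        # benchmark loop above reads after each with-block.
        self.time = time.time() - self._start
        print('{}: {:.3f}s'.format(self.name, self.time))
        return False

Used the same way as in the benchmark: with SimpleTimer('Data loaded') as t: ... and then t.time holds the elapsed seconds.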
Example #7
    @classmethod
    def setUpClass(cls):
        dn.use(backend=BACKEND, engine=ENGINE)
        cls.connection_handle = dn.Connection(**CONNECTION_CONFIG)
        cls.connection_handle.connect()
Example #8
    def setUp(self):
        log.info('ConnectionTest: %s, %s, %s',
                 self.BACKEND, self.ENGINE, self.CONNECTION_CONFIG)

        dn.use(backend=self.BACKEND, engine=self.ENGINE)
        self.connection_handle = dn.Connection(**self.CONNECTION_CONFIG)