def test_mpi_attr(comm):
    """Attributes written collectively must round-trip and be overwritable."""
    # Rank 0 creates the scratch directory; everyone learns its path via bcast.
    if comm.rank == 0:
        fname = tempfile.mkdtemp()
        fname = comm.bcast(fname)
    else:
        fname = comm.bcast(None)

    bigfile = BigFileMPI(comm, fname, create=True)

    # First pass: store attributes of several types on the root block.
    with bigfile.create('.', dtype=None) as blk:
        blk.attrs['int'] = 128
        blk.attrs['float'] = [128.0, 3, 4]
        blk.attrs['string'] = 'abcdefg'

    # Reading back must match, and in-place overwrites must stick.
    with bigfile.open('.') as blk:
        assert_equal(blk.attrs['int'], 128)
        assert_equal(blk.attrs['float'], [128.0, 3, 4])
        assert_equal(blk.attrs['string'], 'abcdefg')
        blk.attrs['int'] = 30
        blk.attrs['float'] = [3, 4]
        blk.attrs['string'] = 'defg'

    with bigfile.open('.') as blk:
        assert_equal(blk.attrs['int'], 30)
        assert_equal(blk.attrs['float'], [3, 4])
        assert_equal(blk.attrs['string'], 'defg')

    comm.barrier()
    # Only the rank that created the directory removes it.
    if comm.rank == 0:
        shutil.rmtree(fname)
def test_mpi_large(comm):
    """Collectively write a large array per dtype with create_from_array, read it back."""
    # Rank 0 makes the temp directory and broadcasts the path to all ranks.
    if comm.rank == 0:
        fname = tempfile.mkdtemp()
        fname = comm.bcast(fname)
    else:
        fname = comm.bcast(None)

    bigfile = BigFileMPI(comm, fname, create=True)
    size = 1024 * 1024

    for name, d in dtypes:
        d = numpy.dtype(d)
        numpy.random.seed(1234)

        # test creating with create_array; large enough for all types
        raw = numpy.random.uniform(100000, size=4 * size)
        data = raw.view(dtype=d.base).reshape([-1] + list(d.shape))[:size]
        # Each rank contributes only its own slice of the full array.
        local = comm.scatter(numpy.array_split(data, comm.size))

        with bigfile.create_from_array(name, local, memorylimit=1024 * 128) as b:
            pass

        # The reassembled on-disk data must equal the full array on every rank.
        with bigfile[name] as b:
            assert_equal(b[:], data.astype(d.base))

    comm.barrier()
    if comm.rank == 0:
        shutil.rmtree(fname)
def test_mpi_large(comm):
    """Collectively write a large array per dtype with create_from_array, read it back."""
    # The scratch directory is created once on rank 0 and shared by broadcast.
    if comm.rank == 0:
        fname = tempfile.mkdtemp()
        fname = comm.bcast(fname)
    else:
        fname = comm.bcast(None)

    bigfile = BigFileMPI(comm, fname, create=True)
    size = 1024 * 1024

    for spec in dtypes:
        d = numpy.dtype(spec)
        numpy.random.seed(1234)

        # test creating with create_array; large enough for all types
        raw = numpy.random.uniform(100000, size=4 * size)
        data = raw.view(dtype=d.base).reshape([-1] + list(d.shape))[:size]
        # Distribute consecutive chunks of the array across the ranks.
        local = comm.scatter(numpy.array_split(data, comm.size))

        with bigfile.create_from_array(d.str, local, memorylimit=1024 * 128) as b:
            pass

        # Reading the block back must reproduce the full array on every rank.
        with bigfile[d.str] as b:
            assert_equal(b[:], data.astype(d.base))

    comm.barrier()
    if comm.rank == 0:
        shutil.rmtree(fname)
def test_mpi_create(comm):
    """Create one block per dtype, then exercise offset writes, out-of-bounds
    writes, block enumeration/containment, and BigData aggregation.
    """
    # Rank 0 creates the scratch directory; the path is broadcast to all ranks.
    if comm.rank == 0:
        fname = tempfile.mkdtemp()
        fname = comm.bcast(fname)
    else:
        fname = comm.bcast(None)

    x = BigFileMPI(comm, fname, create=True)

    for d in dtypes:
        d = numpy.dtype(d)
        numpy.random.seed(1234)

        # test creating
        with x.create(d.str, Nfile=1, dtype=d, size=128) as b:
            data = numpy.random.uniform(100000, size=128 * 128).view(
                dtype=b.dtype.base).reshape([-1] + list(d.shape))[:b.size]
            b.write(0, data)

        with x[d.str] as b:
            assert_equal(b[:], data.astype(d.base))

        # test writing with an offset
        with x[d.str] as b:
            b.write(1, data[0:1])
            assert_equal(b[1:2], data[0:1].astype(d.base))

        # test writing beyond file length
        with x[d.str] as b:
            caught = False
            try:
                b.write(1, data)
            # FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; Exception keeps those fatal.
            except Exception:
                caught = True
            assert caught

    # Every created block must be enumerated and answer containment checks.
    assert_equal(set(x.blocks), set([numpy.dtype(d).str for d in dtypes]))
    for b in x.blocks:
        assert b in x
    for b in x:
        assert b in x

    # BigData must aggregate all blocks into one structured view.
    bd = BigData(x)
    assert set(bd.dtype.names) == set(x.blocks)
    d = bd[:]

    comm.barrier()
    if comm.rank == 0:
        shutil.rmtree(fname)
def to_real_field(self):
    """
    Return the RealField stored on disk.

    .. note::
        The mesh stored on disk must be stored with ``mode=real``

    Returns
    -------
    real : pmesh.pm.RealField
        an array-like object holding the mesh loaded from disk in
        configuration space
    """
    # Complex-mode meshes cannot be returned as a RealField.
    if self.isfourier:
        return NotImplemented

    # the real field to paint to
    pmread = self.pm

    with BigFileMPI(comm=self.comm, filename=self.path)[self.dataset] as ds:
        if self.comm.rank == 0:
            self.logger.info("reading real field from %s" % self.path)
        real2 = RealField(pmread)
        # FIX: sanity-check that the on-disk dataset holds exactly one value
        # per mesh element before slicing; mirrors the identical guard in
        # to_complex_field, which this method previously lacked. Without it a
        # size mismatch would silently read the wrong region.
        assert self.comm.allreduce(real2.size) == ds.size
        # Each rank reads the contiguous slab it owns in the flattened layout.
        start = sum(self.comm.allgather(real2.size)[:self.comm.rank])
        end = start + real2.size
        real2.unsort(ds[start:end])

    return real2
def to_complex_field(self):
    """
    Return the ComplexField stored on disk.

    .. note::
        The mesh stored on disk must be stored with ``mode=complex``

    Returns
    -------
    real : pmesh.pm.ComplexField
        an array-like object holding the mesh loaded from disk in
        Fourier space
    """
    # A real-mode mesh cannot be returned as a ComplexField.
    if not self.isfourier:
        return NotImplemented

    pmread = self.pm
    if self.comm.rank == 0:
        self.logger.info("reading complex field from %s" % self.path)

    with BigFileMPI(comm=self.comm, filename=self.path)[self.dataset] as ds:
        complex2 = ComplexField(pmread)
        # The dataset must hold exactly one value per mesh element overall.
        assert self.comm.allreduce(complex2.size) == ds.size
        # Offset of this rank's contiguous slab within the flattened dataset.
        offset = sum(self.comm.allgather(complex2.size)[:self.comm.rank])
        complex2.unsort(ds[offset:offset + complex2.size])

    return complex2
def test_mpi_create(comm):
    """Create one block per dtype, then exercise offset writes, out-of-bounds
    writes, block enumeration/containment, and BigData aggregation.
    """
    # Rank 0 creates the scratch directory; the path is broadcast to all ranks.
    if comm.rank == 0:
        fname = tempfile.mkdtemp()
        fname = comm.bcast(fname)
    else:
        fname = comm.bcast(None)

    x = BigFileMPI(comm, fname, create=True)

    for d in dtypes:
        d = numpy.dtype(d)
        numpy.random.seed(1234)

        # test creating
        with x.create(d.str, Nfile=1, dtype=d, size=128) as b:
            data = numpy.random.uniform(100000, size=128*128).view(
                dtype=b.dtype.base).reshape([-1] + list(d.shape))[:b.size]
            b.write(0, data)

        with x[d.str] as b:
            assert_equal(b[:], data.astype(d.base))

        # test writing with an offset
        with x[d.str] as b:
            b.write(1, data[0:1])
            assert_equal(b[1:2], data[0:1].astype(d.base))

        # test writing beyond file length
        with x[d.str] as b:
            caught = False
            try:
                b.write(1, data)
            # FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; Exception keeps those fatal.
            except Exception:
                caught = True
            assert caught

    # Every created block must be enumerated and answer containment checks.
    assert_equal(set(x.blocks), set([numpy.dtype(d).str for d in dtypes]))
    for b in x.blocks:
        assert b in x
    for b in x:
        assert b in x

    # BigData must aggregate all blocks into one structured view.
    bd = BigData(x)
    assert set(bd.dtype.names) == set(x.blocks)
    d = bd[:]

    comm.barrier()
    if comm.rank == 0:
        shutil.rmtree(fname)
def __init__(self, path, dataset, comm=None, **kwargs):
    """Open the bigfile mesh at ``path``/``dataset``, load its attributes,
    infer real/fourier mode and precision from the stored dtype, and
    initialize the MeshSource base class.

    Parameters
    ----------
    path : str
        path of the bigfile on disk
    dataset : str
        name of the dataset inside the bigfile holding the mesh
    comm : MPI communicator, optional
        communicator used for collective reads
    **kwargs :
        extra entries merged into ``self.attrs``

    Raises
    ------
    ValueError
        if ``ndarray.shape`` or ``Nmesh`` is missing from the file attrs
    """
    self.path = path
    self.dataset = dataset
    self.comm = comm

    # update the meta-data
    self.attrs.update(kwargs)

    with BigFileMPI(comm=self.comm, filename=path)[dataset] as ff:
        for key in ff.attrs:
            v = ff.attrs[key]
            # strings prefixed with 'json://' carry a JSON-encoded payload
            if isinstance(v, string_types) and v.startswith('json://'):
                self.attrs[key] = json.loads(v[7:], cls=JSONDecoder)
            else:
                self.attrs[key] = numpy.squeeze(v)

        # fourier space or config space: pick the real precision matching
        # the stored item size (complex128 -> f8, complex64 -> f4, etc.)
        if ff.dtype.kind == 'c':
            self.isfourier = True
            if ff.dtype.itemsize == 16:
                dtype = 'f8'
            else:
                dtype = 'f4'
        else:
            self.isfourier = False
            if ff.dtype.itemsize == 8:
                dtype = 'f8'
            else:
                dtype = 'f4'

    # determine Nmesh
    if 'ndarray.shape' not in self.attrs:
        raise ValueError(
            "`ndarray.shape` should be stored in the Bigfile `attrs` to determine `Nmesh`"
        )
    if 'Nmesh' not in self.attrs:
        # FIX: this message previously named `ndarray.shape` (copy/paste),
        # misreporting which attribute is actually missing.
        raise ValueError(
            "`Nmesh` should be stored in the Bigfile `attrs` to determine `Nmesh`"
        )

    Nmesh = self.attrs['Nmesh']
    BoxSize = self.attrs['BoxSize']

    MeshSource.__init__(self, BoxSize=BoxSize, Nmesh=Nmesh, dtype=dtype, comm=comm)