def test_inplace_fft(comm):
    pm = ParticleMesh(BoxSize=8.0, Nmesh=[8, 8], comm=comm, dtype='f8')
    real = RealField(pm)
    numpy.random.seed(1234)
    if comm.rank == 0:
        Npar = 100
    else:
        Npar = 0

    pos = 1.0 * (numpy.arange(Npar * len(pm.Nmesh))).reshape(-1, len(pm.Nmesh)) * (7, 7)
    pos %= (pm.Nmesh + 1)

    layout = pm.decompose(pos)
    npos = layout.exchange(pos)
    real = pm.paint(npos)

    complex = real.r2c()
    complex2 = real.r2c(out=Ellipsis)

    assert real._base in complex2._base
    assert_almost_equal(numpy.asarray(complex), numpy.asarray(complex2), decimal=7)

    real = complex2.c2r()
    real2 = complex2.c2r(out=Ellipsis)

    assert real2._base in complex2._base
    assert_almost_equal(numpy.asarray(real), numpy.asarray(real2), decimal=7)
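# A minimal sketch (not part of the test suite above) of the in-place transform
# idiom that test_inplace_fft exercises: passing ``out=Ellipsis`` asks pmesh to
# reuse the field's own buffer for the result instead of allocating a second
# mesh-sized array.  The buffer-sharing detail is inferred from the ``_base``
# assertions in the test.
from pmesh.pm import ParticleMesh, RealField

def sketch_inplace_roundtrip(comm):
    pm = ParticleMesh(BoxSize=8.0, Nmesh=[8, 8], comm=comm, dtype='f8')
    real = RealField(pm)
    real[...] = 1.0
    complex = real.r2c(out=Ellipsis)   # forward FFT, overwriting the real buffer
    real = complex.c2r(out=Ellipsis)   # inverse FFT, reusing the same buffer
    return real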
def test_fupsample(comm):
    pm1 = ParticleMesh(BoxSize=8.0, Nmesh=[8, 8], comm=comm, dtype='f8')
    pm2 = ParticleMesh(BoxSize=8.0, Nmesh=[4, 4], comm=comm, dtype='f8')

    numpy.random.seed(3333)
    truth = numpy.fft.rfftn(numpy.random.normal(size=(8, 8)))

    complex1 = ComplexField(pm1)
    for ind in numpy.ndindex(*complex1.cshape):
        complex1.csetitem(ind, truth[ind])
        if any(i == 4 for i in ind):
            complex1.csetitem(ind, 0)
        else:
            complex1.csetitem(ind, truth[ind])

        if any(i >= 2 and i < 7 for i in ind):
            complex1.csetitem(ind, 0)

    assert_almost_equal(complex1[...], complex1.c2r().r2c())

    complex2 = ComplexField(pm2)
    for ind in numpy.ndindex(*complex2.cshape):
        newind = tuple([i if i <= 2 else 8 - (4 - i) for i in ind])
        if any(i == 2 for i in ind):
            complex2.csetitem(ind, 0)
        else:
            complex2.csetitem(ind, truth[newind])

    tmpr = RealField(pm1)
    tmp = ComplexField(pm1)

    complex2.resample(tmp)
    assert_almost_equal(complex1[...], tmp[...], decimal=5)

    complex2.c2r().resample(tmp)
    assert_almost_equal(complex1[...], tmp[...], decimal=5)

    complex2.resample(tmpr)
    assert_almost_equal(tmpr.r2c(), tmp[...])

    complex2.c2r().resample(tmpr)
    assert_almost_equal(tmpr.r2c(), tmp[...])
def paint(self, pm, datasource):
    """
    Paint the ``DataSource`` specified by ``datasource`` onto the
    ``ParticleMesh`` specified by ``pm``

    Parameters
    ----------
    pm : ``ParticleMesh``
        particle mesh object that does the painting
    datasource : ``DataSource``
        the data source object representing the field to paint onto the mesh

    Returns
    -------
    stats : dict
        dictionary of statistics, usually only containing `Ntot`
    """
    stats = {}
    real = RealField(pm)
    real[:] = 0

    if isinstance(datasource, DataSource):
        # open the datasource stream (with no defaults)
        if self.interlaced:
            real2 = RealField(pm)
            real2[...] = 0

        with datasource.open() as stream:
            Nlocal = 0
            if self.weight is None:
                for [position] in stream.read(['Position']):
                    if not self.interlaced:
                        self.basepaint(real, position, paintbrush=self.paintbrush)
                    else:
                        self.shiftedpaint(real, real2, position, paintbrush=self.paintbrush)
                    Nlocal += len(position)
            else:
                for position, weight in stream.read(['Position', self.weight]):
                    if not self.interlaced:
                        self.basepaint(real, position, weight=weight, paintbrush=self.paintbrush)
                    else:
                        self.shiftedpaint(real, real2, position, weight=weight, paintbrush=self.paintbrush)
                    Nlocal += len(position)

        if self.interlaced:
            c1 = real.r2c()
            c2 = real2.r2c()

            H = pm.BoxSize / pm.Nmesh
            for k, s1, s2 in zip(c1.slabs.x, c1.slabs, c2.slabs):
                kH = sum(k[i] * H[i] for i in range(3))
                s1[...] = s1[...] * 0.5 + s2[...] * 0.5 * numpy.exp(0.5 * 1j * kH)

            c1.c2r(real)

        stats['Ntot'] = self.comm.allreduce(Nlocal)

    elif isinstance(datasource, GridSource):
        datasource.read(real)
        stats['Ntot'] = datasource.Ntot

    # apply the filters.
    mean = self.comm.allreduce(real.sum(dtype='f8')) / real.Nmesh.prod()
    if self.comm.rank == 0:
        self.logger.info("Mean = %g" % mean)

    if self.normalize:
        real[...] *= 1. / mean
        mean = self.comm.allreduce(real.sum(dtype='f8')) / real.Nmesh.prod()
        if self.comm.rank == 0:
            self.logger.info("Renormalized mean = %g" % mean)

    if self.setMean is not None:
        real[...] += (self.setMean - mean)

    if self.fk:
        if self.comm.rank == 0:
            self.logger.info("applying transformation fk %s" % self.fk)

        def function(k, kx, ky, kz):
            from numpy import exp, sin, cos
            return eval(self.fk)

        complex = real.r2c()
        for kk, slab in zip(complex.slabs.x, complex.slabs):
            k = sum([k ** 2 for k in kk]) ** 0.5
            slab[...] *= function(k, kk[0], kk[1], kk[2])

        complex.c2r(real)
        mean = self.comm.allreduce(real.sum(dtype='f8')) / real.Nmesh.prod()
        if self.comm.rank == 0:
            self.logger.info("after fk, mean = %g" % mean)

    if self.frho:
        if self.comm.rank == 0:
            self.logger.info("applying transformation frho %s" % self.frho)

        def function(rho):
            return eval(self.frho)

        if self.comm.rank == 0:
            self.logger.info("example value before frho %g" % real.flat[0])
        for slab in real.slabs:
            slab[...] = function(slab)
        if self.comm.rank == 0:
            self.logger.info("example value after frho %g" % real.flat[0])

        mean = self.comm.allreduce(real.sum(dtype='f8')) / real.Nmesh.prod()
        if self.comm.rank == 0:
            self.logger.info("after frho, mean = %g" % mean)

    return real, stats
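# A minimal numpy-only sketch (not from the library) of the interlacing
# combination used in ``paint`` above.  ``delta1`` is assumed painted on the
# regular grid and ``delta2`` on a grid shifted by half a cell; the shifted
# transform picks up a phase exp(i k H / 2), and averaging the two transforms
# cancels the leading aliased images.  Shown in 1-D for clarity.
import numpy

def combine_interlaced_1d(delta1, delta2, BoxSize):
    N = len(delta1)
    H = BoxSize / N                                  # cell size
    k = 2 * numpy.pi * numpy.fft.rfftfreq(N, d=H)    # angular wavenumbers of the rfft
    c1 = numpy.fft.rfft(delta1)
    c2 = numpy.fft.rfft(delta2)
    combined = 0.5 * c1 + 0.5 * c2 * numpy.exp(0.5 * 1j * k * H)
    return numpy.fft.irfft(combined, n=N)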
def to_real_field(self, out=None, normalize=True):
    r"""
    Paint the density field by interpolating the position column onto the mesh.

    This computes the following meta-data attributes in the process of
    painting, returned in the :attr:`attrs` attribute of the returned
    RealField object:

    - N : int
        the (unweighted) total number of objects painted to the mesh
    - W : float
        the weighted number of total objects, equal to the collective
        sum of the 'weight' column
    - shotnoise : float
        the Poisson shot noise, equal to the box volume times ``W2 / W ** 2``
        (the volume divided by ``N`` for unit weights)
    - num_per_cell : float
        the mean number of weighted objects per cell

    .. note::

        The density field on the mesh is normalized as :math:`1+\delta`,
        such that the collective mean of the field is unity.

    See the :ref:`documentation <painting-mesh>` on painting for more
    details on painting catalogs to a mesh.

    Returns
    -------
    real : :class:`pmesh.pm.RealField`
        the painted real field; this has an ``attrs`` dict storing meta-data
    """
    pm = self.pm

    Nlocal = 0   # (unweighted) number of particles read on local rank
    Wlocal = 0   # (weighted) number of particles read on local rank
    W2local = 0  # sum of weight square. This is used to estimate shotnoise.

    # the paint brush window
    resampler = window.methods[self.resampler]

    # initialize the RealField to return
    if out is not None:
        assert isinstance(out, RealField), "output of to_real_field must be a RealField"
        numpy.testing.assert_array_equal(out.pm.Nmesh, pm.Nmesh)
        toret = out
    else:
        toret = RealField(pm)
        toret[:] = 0

    # for interlacing, we need two empty meshes if out was provided
    # since out may have non-zero elements, messing up our interlacing sum
    if self.interlaced:
        real1 = RealField(pm)
        real1[:] = 0

        # the second, shifted mesh (always needed)
        real2 = RealField(pm)
        real2[:] = 0

    Position = self.Position
    Weight = self.Weight
    Value = self.Value
    Selection = self.Selection

    # ensure the slices are synced, since decomposition is collective
    Nlocalmax = max(pm.comm.allgather(len(Position)))

    H = pm.BoxSize / pm.Nmesh

    # paint data in chunks on each rank;
    # we do this by chunk 8 million is pretty big anyways.
    max_chunksize = _global_options['paint_chunk_size']

    # use a local scope to avoid having two copies of data in memory
    def dochunk(s):
        if len(Position) != 0:

            # selection has to be computed many times when data is `large`.
            columns = [Position[s]]
            if Weight is not None:
                columns.append(Weight[s])
            if Value is not None:
                columns.append(Value[s])
            if Selection is not None:
                columns.append(Selection[s])

            # be sure to use the source to compute
            data = self.source.compute(columns)

            sel = Ellipsis if Selection is None else data.pop()
            value = None if Value is None else data.pop()[sel]
            weight = None if Weight is None else data.pop()[sel]
            position = data.pop()[sel]
        else:
            # workaround a potential dask issue on empty dask arrays
            position = numpy.empty((0, 3), dtype=Position.dtype)
            weight = None
            value = None

        if weight is None:
            weight = numpy.ones(len(position))

        if value is None:
            value = numpy.ones(len(position))

        # track total (selected) number and sum of weights
        Nlocal = len(position)
        Wlocal = weight.sum()
        W2local = (weight ** 2).sum()

        # no interlacing
        if not self.interlaced:
            lay = pm.decompose(position, smoothing=0.5 * resampler.support)
        else:
            lay = pm.decompose(position, smoothing=1.0 * resampler.support)

        # if we are receiving too many particles, abort and retry with a smaller chunksize
        recvlengths = pm.comm.allgather(lay.recvlength)
        if any([recvlength > 2 * max_chunksize for recvlength in recvlengths]):
            if pm.comm.rank == 0:
                self.logger.info("Throttling chunksize as some ranks will receive too many particles. (%d > %d)"
                                 % (max(recvlengths), max_chunksize * 2))
            raise StopIteration

        p = lay.exchange(position)
        w = lay.exchange(weight)
        v = lay.exchange(value)

        if not self.interlaced:
            pm.paint(p, mass=w * v, resampler=resampler, hold=True, out=toret)
        # interlacing: use 2 meshes separated by 1/2 cell size
        else:
            # in mesh units
            shifted = pm.affine.shift(0.5)

            # paint to two shifted meshes
            pm.paint(p, mass=w * v, resampler=resampler, hold=True, out=real1)
            pm.paint(p, mass=w * v, resampler=resampler, transform=shifted, hold=True, out=real2)

        return Nlocal, Wlocal, W2local

    import gc
    i = 0
    chunksize = max_chunksize
    while i < Nlocalmax:

        s = slice(i, i + chunksize)

        if pm.comm.rank == 0:
            self.logger.info("Chunk %d ~ %d / %d " % (i, i + chunksize, Nlocalmax))

        try:
            Nlocal1, Wlocal1, W2local1 = dochunk(s)
        except StopIteration:
            chunksize = chunksize // 2
            if chunksize < 1:
                raise RuntimeError("Cannot find a chunksize that fits into memory.")
            continue
        finally:
            # collect unfreed items
            gc.collect()

        Nlocal += Nlocal1
        Wlocal += Wlocal1
        W2local += W2local1

        Nglobal = pm.comm.allreduce(Nlocal)

        if pm.comm.rank == 0:
            self.logger.info("painted %d out of %d objects to mesh"
                             % (Nglobal, self.source.csize))

        i = i + chunksize
        chunksize = min(max_chunksize, int(chunksize * 1.5))

    # now the loop over particles is done

    if not self.interlaced:
        # nothing to do, toret is already filled.
        pass
    else:
        # compose the two interlaced fields into the final result.
        c1 = real1.r2c()
        c2 = real2.r2c()

        # and then combine
        for k, s1, s2 in zip(c1.slabs.x, c1.slabs, c2.slabs):
            kH = sum(k[i] * H[i] for i in range(3))
            s1[...] = s1[...] * 0.5 + s2[...] * 0.5 * numpy.exp(0.5 * 1j * kH)

        # FFT back to real-space
        # NOTE: cannot use "toret" here in case user supplied "out"
        c1.c2r(real1)

        # need to add to the returned mesh if user supplied "out"
        toret[:] += real1[:]

    # unweighted number of objects
    N = pm.comm.allreduce(Nlocal)

    # weighted number of objects
    W = pm.comm.allreduce(Wlocal)

    # sum of squared weights
    W2 = pm.comm.allreduce(W2local)

    # weighted number density (objs/cell)
    nbar = 1. * W / numpy.prod(pm.Nmesh)

    # make sure we painted something or nbar is nan; in which case
    # we set the density to uniform everywhere.
    if N == 0:
        warnings.warn(("trying to paint particle source to mesh, "
                       "but no particles were found!"),
                      RuntimeWarning)

    # shot noise is volume * sum(w^2) / (sum w)^2 (volume / N for unit weights)
    shotnoise = numpy.prod(pm.BoxSize) * W2 / W ** 2

    # save some meta-data
    toret.attrs = {}
    toret.attrs['shotnoise'] = shotnoise
    toret.attrs['N'] = N
    toret.attrs['W'] = W
    toret.attrs['W2'] = W2
    toret.attrs['num_per_cell'] = nbar

    csum = toret.csum()
    if pm.comm.rank == 0:
        self.logger.info("painted %d out of %d objects to mesh"
                         % (N, self.source.csize))
        self.logger.info("mean particles per cell is %g", nbar)
        self.logger.info("sum is %g", csum)

    if normalize:
        if nbar > 0:
            toret[...] /= nbar
        else:
            toret[...] = 1

        if pm.comm.rank == 0:
            self.logger.info("normalized the convention to 1 + delta")

    return toret
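# A hedged usage sketch for the method above.  ``UniformCatalog`` and the
# ``to_mesh`` keyword names follow recent nbodykit releases and are assumptions
# here, not something defined in this file; the point is only that
# ``to_real_field`` is normally reached through a mesh object built from a
# catalog, and that the meta-data lands in ``attrs``.
def sketch_to_real_field_usage():
    from nbodykit.lab import UniformCatalog
    cat = UniformCatalog(nbar=1e-3, BoxSize=512., seed=42)
    mesh = cat.to_mesh(Nmesh=128, resampler='cic', interlaced=True)
    rfield = mesh.to_real_field()   # the method above
    # meta-data recorded while painting
    return rfield.attrs['N'], rfield.attrs['W'], rfield.attrs['shotnoise']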
def main():
    ns = ap.parse_args()
    comm = MPI.COMM_WORLD

    ff = bigfile.BigFileMPI(comm, ns.fastpm)
    with ff['.'] as bb:
        BoxSize = bb.attrs['BoxSize'][0]
        Redshift = 1 / bb.attrs['ScalingFactor'][0] - 1

    Nmesh = int(BoxSize / ns.resolution * 2)
    # round it to 8.
    Nmesh -= Nmesh % 8

    if comm.rank == 0:
        logger.info("source = %s", ns.fastpm)
        logger.info("output = %s", ns.output)
        logger.info("BoxSize = %g", BoxSize)
        logger.info("Redshift = %g", Redshift)
        logger.info("Nmesh = %g", Nmesh)

    pm = ParticleMesh([Nmesh, Nmesh, Nmesh], BoxSize, comm=comm)
    real = RealField(pm)
    real[...] = 0

    with ff['Position'] as ds:
        logger.info(ds.size)
        for i in range(0, ds.size, ns.chunksize):
            sl = slice(i, i + ns.chunksize)
            pos = ds[sl]
            layout = pm.decompose(pos)
            lpos = layout.exchange(pos)
            real.paint(lpos, hold=True)

    mean = real.cmean()
    if comm.rank == 0:
        logger.info("mean particle per cell = %s", mean)

    real[...] /= mean
    real[...] -= 1

    complex = real.r2c()

    for k, i, slab in zip(complex.slabs.x, complex.slabs.i, complex.slabs):
        k2 = sum(kd ** 2 for kd in k)
        # tophat
        f = tophat(ns.filtersize, k2 ** 0.5)
        slab[...] *= f
        # zreion
        slab[...] *= Bk(k2 ** 0.5)
        slab[...] *= (1 + Redshift)

    real = complex.c2r()
    real[...] += Redshift

    mean = real.cmean()
    if comm.rank == 0:
        logger.info("zreion.mean = %s", mean)

    buffer = numpy.empty(real.size, real.dtype)
    real.sort(out=buffer)
    if comm.rank == 0:
        logger.info("sorted for output")

    with bigfile.BigFileMPI(comm, ns.output, create=True) as ff:
        with ff.create_from_array(ns.dataset, buffer) as bb:
            bb.attrs['BoxSize'] = BoxSize
            bb.attrs['Redshift'] = Redshift
            bb.attrs['TopHatFilterSize'] = ns.filtersize
            bb.attrs['Nmesh'] = Nmesh

        # hack: compatible with current MPGadget. This is not really needed;
        # we'll remove the bins later, since BoxSize and Nmesh are known.
        with ff.create("XYZ_bins", dtype='f8', size=Nmesh) as bb:
            if comm.rank == 0:
                bins = numpy.linspace(0, BoxSize * 1000., Nmesh, dtype='f8')
                bb.write(0, bins)

    if comm.rank == 0:
        logger.info("done. written at %s", ns.output)
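# ``tophat`` and ``Bk`` used in main() are defined elsewhere in this script.
# As a hedged sketch, a spherical top-hat of radius R is commonly applied in
# Fourier space with the window below (the script's actual filter may differ).
# The argument order mirrors the call ``tophat(ns.filtersize, k)`` above.
import numpy

def tophat_sketch(R, k):
    """Fourier window of a 3-D spherical top-hat of radius R; equals 1 at k = 0."""
    x = numpy.atleast_1d(1.0 * k) * R
    with numpy.errstate(invalid='ignore', divide='ignore'):
        w = 3.0 * (numpy.sin(x) - x * numpy.cos(x)) / x ** 3
    w[x == 0] = 1.0
    return w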
def to_real_field(self, out=None, normalize=True):
    r"""
    Paint the density field by interpolating the position column onto the mesh.

    This computes the following meta-data attributes in the process of
    painting, returned in the :attr:`attrs` attribute of the returned
    RealField object:

    - N : int
        the (unweighted) total number of objects painted to the mesh
    - W : float
        the weighted number of total objects, equal to the collective
        sum of the 'weight' column
    - shotnoise : float
        the Poisson shot noise, equal to the box volume times ``W2 / W ** 2``
        (the volume divided by ``N`` for unit weights)
    - num_per_cell : float
        the mean number of weighted objects per cell

    .. note::

        The density field on the mesh is normalized as :math:`1+\delta`,
        such that the collective mean of the field is unity.

    See the :ref:`documentation <painting-mesh>` on painting for more
    details on painting catalogs to a mesh.

    Returns
    -------
    real : :class:`pmesh.pm.RealField`
        the painted real field; this has an ``attrs`` dict storing meta-data
    """
    pm = self.pm

    Nlocal = 0   # (unweighted) number of particles read on local rank
    Wlocal = 0   # (weighted) number of particles read on local rank
    W2local = 0  # sum of weight square. This is used to estimate shotnoise.

    # the paint brush window
    resampler = window.methods[self.resampler]

    # initialize the RealField to return
    if out is not None:
        assert isinstance(out, RealField), "output of to_real_field must be a RealField"
        numpy.testing.assert_array_equal(out.pm.Nmesh, pm.Nmesh)
        toret = out
    else:
        toret = RealField(pm)
        toret[:] = 0

    # for interlacing, we need two empty meshes if out was provided
    # since out may have non-zero elements, messing up our interlacing sum
    if self.interlaced:
        real1 = RealField(pm)
        real1[:] = 0

        # the second, shifted mesh (always needed)
        real2 = RealField(pm)
        real2[:] = 0

    Position = self.Position
    Weight = self.Weight
    Value = self.Value
    Selection = self.Selection

    # ensure the slices are synced, since decomposition is collective
    Nlocalmax = max(pm.comm.allgather(len(Position)))

    H = pm.BoxSize / pm.Nmesh

    # paint data in chunks on each rank;
    # we do this by chunk 8 million is pretty big anyways.
    max_chunksize = _global_options['paint_chunk_size']

    # use a local scope to avoid having two copies of data in memory
    def dochunk(s):
        if len(Position) != 0:

            # selection has to be computed many times when data is `large`.
            columns = [Position[s]]
            if Weight is not None:
                columns.append(Weight[s])
            if Value is not None:
                columns.append(Value[s])
            if Selection is not None:
                columns.append(Selection[s])

            # be sure to use the source to compute
            data = self.source.compute(columns)

            sel = Ellipsis if Selection is None else data.pop()
            value = None if Value is None else data.pop()[sel]
            weight = None if Weight is None else data.pop()[sel]
            position = data.pop()[sel]
        else:
            # workaround a potential dask issue on empty dask arrays
            position = numpy.empty((0, 3), dtype=Position.dtype)
            weight = None
            value = None

        if weight is None:
            weight = numpy.ones(len(position))

        if value is None:
            value = numpy.ones(len(position))

        # track total (selected) number and sum of weights
        Nlocal = len(position)
        Wlocal = weight.sum()
        W2local = (weight ** 2).sum()

        # no interlacing
        if not self.interlaced:
            lay = pm.decompose(position, smoothing=0.5 * resampler.support)
        else:
            lay = pm.decompose(position, smoothing=1.0 * resampler.support)

        # if we are receiving too many particles, abort and retry with a smaller chunksize
        newlengths = pm.comm.allgather(lay.newlength)
        if any([newlength > 2 * max_chunksize for newlength in newlengths]):
            if pm.comm.rank == 0:
                self.logger.info("Throttling chunksize as some ranks will receive too many particles. (%d > %d)"
                                 % (max(newlengths), max_chunksize * 2))
            raise StopIteration

        p = lay.exchange(position)
        w = lay.exchange(weight)
        v = lay.exchange(value)

        if not self.interlaced:
            pm.paint(p, mass=w * v, resampler=resampler, hold=True, out=toret)
        # interlacing: use 2 meshes separated by 1/2 cell size
        else:
            # in mesh units
            shifted = pm.affine.shift(0.5)

            # paint to two shifted meshes
            pm.paint(p, mass=w * v, resampler=resampler, hold=True, out=real1)
            pm.paint(p, mass=w * v, resampler=resampler, transform=shifted, hold=True, out=real2)

        return Nlocal, Wlocal, W2local

    import gc
    i = 0
    chunksize = max_chunksize
    while i < Nlocalmax:

        s = slice(i, i + chunksize)

        if pm.comm.rank == 0:
            self.logger.info("Chunk %d ~ %d / %d " % (i, i + chunksize, Nlocalmax))

        try:
            Nlocal1, Wlocal1, W2local1 = dochunk(s)
            chunksize = min(max_chunksize, int(chunksize * 1.5))
        except StopIteration:
            chunksize = chunksize // 2
            if chunksize < 1:
                raise RuntimeError("Cannot find a chunksize that fits into memory.")
            continue
        finally:
            # collect unfreed items
            gc.collect()

        Nlocal += Nlocal1
        Wlocal += Wlocal1
        W2local += W2local1

        Nglobal = pm.comm.allreduce(Nlocal)

        if pm.comm.rank == 0:
            self.logger.info("painted %d out of %d objects to mesh"
                             % (Nglobal, self.source.csize))

        i = i + chunksize

    # now the loop over particles is done

    if not self.interlaced:
        # nothing to do, toret is already filled.
        pass
    else:
        # compose the two interlaced fields into the final result.
        c1 = real1.r2c()
        c2 = real2.r2c()

        # and then combine
        for k, s1, s2 in zip(c1.slabs.x, c1.slabs, c2.slabs):
            kH = sum(k[i] * H[i] for i in range(3))
            s1[...] = s1[...] * 0.5 + s2[...] * 0.5 * numpy.exp(0.5 * 1j * kH)

        # FFT back to real-space
        # NOTE: cannot use "toret" here in case user supplied "out"
        c1.c2r(real1)

        # need to add to the returned mesh if user supplied "out"
        toret[:] += real1[:]

    # unweighted number of objects
    N = pm.comm.allreduce(Nlocal)

    # weighted number of objects
    W = pm.comm.allreduce(Wlocal)

    # sum of squared weights
    W2 = pm.comm.allreduce(W2local)

    # weighted number density (objs/cell)
    nbar = 1. * W / numpy.prod(pm.Nmesh)

    # make sure we painted something or nbar is nan; in which case
    # we set the density to uniform everywhere.
    if N == 0:
        warnings.warn(("trying to paint particle source to mesh, "
                       "but no particles were found!"),
                      RuntimeWarning)

    # shot noise is volume * sum(w^2) / (sum w)^2 (volume / N for unit weights)
    shotnoise = numpy.prod(pm.BoxSize) * W2 / W ** 2

    # save some meta-data
    toret.attrs = {}
    toret.attrs['shotnoise'] = shotnoise
    toret.attrs['N'] = N
    toret.attrs['W'] = W
    toret.attrs['W2'] = W2
    toret.attrs['num_per_cell'] = nbar

    csum = toret.csum()
    if pm.comm.rank == 0:
        self.logger.info("painted %d out of %d objects to mesh"
                         % (N, self.source.csize))
        self.logger.info("mean particles per cell is %g", nbar)
        self.logger.info("sum is %g", csum)

    if normalize:
        if nbar > 0:
            toret[...] /= nbar
        else:
            toret[...] = 1

        if pm.comm.rank == 0:
            self.logger.info("normalized the convention to 1 + delta")

    return toret
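# A short worked note on the shot-noise estimate stored above: with per-object
# weights w it is  V * sum(w**2) / (sum(w))**2,  which reduces to the familiar
# V / N when every weight is 1.  The helper below is only an illustration, not
# part of the class.
import numpy

def shotnoise_sketch(BoxSize, weights):
    weights = numpy.asarray(weights, dtype='f8')
    V = numpy.prod(BoxSize)        # box volume
    W = weights.sum()              # sum of weights
    W2 = (weights ** 2).sum()      # sum of squared weights
    return V * W2 / W ** 2         # equals V / len(weights) for unit weights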
def to_real_field(self, out=None, normalize=True):
    r"""
    Paint the density field by interpolating the position column onto the mesh.

    This computes the following meta-data attributes in the process of
    painting, returned in the :attr:`attrs` attribute of the returned
    RealField object:

    - N : int
        the (unweighted) total number of objects painted to the mesh
    - W : float
        the weighted number of total objects, equal to the collective
        sum of the 'weight' column
    - shotnoise : float
        the Poisson shot noise, equal to the volume divided by ``N``
    - num_per_cell : float
        the mean number of weighted objects per cell

    .. note::

        The density field on the mesh is normalized as :math:`1+\delta`,
        such that the collective mean of the field is unity.

    See the :ref:`documentation <painting-mesh>` on painting for more
    details on painting catalogs to a mesh.

    Returns
    -------
    real : :class:`pmesh.pm.RealField`
        the painted real field; this has an ``attrs`` dict storing meta-data
    """
    # check for 'Position' column
    if self.position not in self.source:
        msg = "in order to paint a CatalogSource to a RealField, add a "
        msg += "column named '%s', representing the particle positions" % self.position
        raise ValueError(msg)

    pm = self.pm

    Nlocal = 0  # (unweighted) number of particles read on local rank
    Wlocal = 0  # (weighted) number of particles read on local rank

    # the paint brush window
    paintbrush = window.methods[self.window]

    # initialize the RealField to return
    if out is not None:
        assert isinstance(out, RealField), "output of to_real_field must be a RealField"
        numpy.testing.assert_array_equal(out.pm.Nmesh, pm.Nmesh)
        toret = out
    else:
        toret = RealField(pm)
        toret[:] = 0

    # for interlacing, we need two empty meshes if out was provided
    # since out may have non-zero elements, messing up our interlacing sum
    if self.interlaced:
        real1 = RealField(pm)
        real1[:] = 0

        # the second, shifted mesh (always needed)
        real2 = RealField(pm)
        real2[:] = 0

    # read the necessary data (as dask arrays)
    columns = [self.position, self.weight, self.value, self.selection]
    Position, Weight, Value, Selection = self.source.read(columns)

    # ensure the slices are synced, since decomposition is collective
    Nlocalmax = max(pm.comm.allgather(len(Position)))

    # paint data in chunks on each rank;
    # we do this by chunk 8 million is pretty big anyways.
    chunksize = _global_options['paint_chunk_size']
    for i in range(0, Nlocalmax, chunksize):

        s = slice(i, i + chunksize)

        if len(Position) != 0:

            # selection has to be computed many times when data is `large`.
            sel = self.source.compute(Selection[s])

            # be sure to use the source to compute
            position, weight, value = \
                self.source.compute(Position[s], Weight[s], Value[s])

            # FIXME: investigate if move selection before compute
            # speeds up IO.
            position = position[sel]
            weight = weight[sel]
            value = value[sel]
        else:
            # workaround a potential dask issue on empty dask arrays
            position = numpy.empty((0, 3), dtype=Position.dtype)
            weight = None
            value = None
            selection = None

        if weight is None:
            weight = numpy.ones(len(position))

        if value is None:
            value = numpy.ones(len(position))

        # track total (selected) number and sum of weights
        Nlocal += len(position)
        Wlocal += weight.sum()

        # no interlacing
        if not self.interlaced:
            lay = pm.decompose(position, smoothing=0.5 * paintbrush.support)
            p = lay.exchange(position)
            w = lay.exchange(weight)
            v = lay.exchange(value)
            pm.paint(p, mass=w * v, resampler=paintbrush, hold=True, out=toret)
        # interlacing: use 2 meshes separated by 1/2 cell size
        else:
            lay = pm.decompose(position, smoothing=1.0 * paintbrush.support)
            p = lay.exchange(position)
            w = lay.exchange(weight)
            v = lay.exchange(value)

            H = pm.BoxSize / pm.Nmesh

            # in mesh units
            shifted = pm.affine.shift(0.5)

            # paint to two shifted meshes
            pm.paint(p, mass=w * v, resampler=paintbrush, hold=True, out=real1)
            pm.paint(p, mass=w * v, resampler=paintbrush, transform=shifted, hold=True, out=real2)

        Nglobal = pm.comm.allreduce(Nlocal)

        if pm.comm.rank == 0:
            self.logger.info("painted %d out of %d objects to mesh"
                             % (Nglobal, self.source.csize))

    # now the loop over particles is done

    if not self.interlaced:
        # nothing to do, toret is already filled.
        pass
    else:
        # compose the two interlaced fields into the final result.
        c1 = real1.r2c()
        c2 = real2.r2c()

        # and then combine
        for k, s1, s2 in zip(c1.slabs.x, c1.slabs, c2.slabs):
            kH = sum(k[i] * H[i] for i in range(3))
            s1[...] = s1[...] * 0.5 + s2[...] * 0.5 * numpy.exp(0.5 * 1j * kH)

        # FFT back to real-space
        # NOTE: cannot use "toret" here in case user supplied "out"
        c1.c2r(real1)

        # need to add to the returned mesh if user supplied "out"
        toret[:] += real1[:]

    # unweighted number of objects
    N = pm.comm.allreduce(Nlocal)

    # weighted number of objects
    W = pm.comm.allreduce(Wlocal)

    # weighted number density (objs/cell)
    nbar = 1. * W / numpy.prod(pm.Nmesh)

    # make sure we painted something or nbar is nan; in which case
    # we set the density to uniform everywhere.
    if N == 0:
        warnings.warn(("trying to paint particle source to mesh, "
                       "but no particles were found!"),
                      RuntimeWarning)

    # shot noise is volume / un-weighted number
    shotnoise = numpy.prod(pm.BoxSize) / N

    # save some meta-data
    toret.attrs = {}
    toret.attrs['shotnoise'] = shotnoise
    toret.attrs['N'] = N
    toret.attrs['W'] = W
    toret.attrs['num_per_cell'] = nbar

    csum = toret.csum()
    if pm.comm.rank == 0:
        self.logger.info("painted %d out of %d objects to mesh"
                         % (N, self.source.csize))
        self.logger.info("mean particles per cell is %g", nbar)
        self.logger.info("sum is %g", csum)
        self.logger.info("normalized the convention to 1 + delta")

    if normalize:
        if nbar > 0:
            toret[...] /= nbar
        else:
            toret[...] = 1

    return toret