def __init__(self, array, BoxSize, comm=None, root=0, **kwargs):

    if comm.rank == root:
        array = numpy.array(array)

        if array.dtype.kind == 'c':
            # transform to real for the correct shape
            array = numpy.fft.irfftn(array)
            array[...] *= numpy.prod(array.shape)

        shape = array.shape
        dtype = array.dtype
    else:
        array, dtype, shape = [None] * 3

    dtype = comm.bcast(dtype, root=root)
    shape = comm.bcast(shape, root=root)

    assert len(shape) in (2, 3)

    Nmesh = shape
    empty = numpy.empty((0,), dtype)

    MeshSource.__init__(self, comm, Nmesh, BoxSize, empty.real.dtype)

    self.field = self.pm.create(type='real')

    if comm.rank != root:
        array = empty  # ignore data from other ranks
    else:
        array = array.ravel()

    # fill the field with the array
    self.field.unravel(array)
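
# Usage sketch (an assumption: this constructor matches nbodykit's ArrayMesh,
# which wraps an in-memory numpy array as a MeshSource; the array values and
# BoxSize below are illustrative only).
import numpy
from nbodykit.lab import ArrayMesh

data = numpy.ones((64, 64, 64))        # toy real-space field, held on the root rank
mesh = ArrayMesh(data, BoxSize=1.0)    # broadcasts shape/dtype and fills the mesh
field = mesh.to_real_field()           # painted RealField, distributed across ranks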
def __init__(self, Plin, BoxSize, Nmesh, seed=None,
             unitary_amplitude=False, inverted_phase=False,
             remove_variance=None, comm=None):

    self.Plin = Plin

    # cosmology and communicator
    self.comm = comm
    self.attrs.update(attrs_to_dict(Plin, 'plin.'))

    # set the seed randomly if it is None
    if seed is None:
        if self.comm.rank == 0:
            seed = numpy.random.randint(0, 4294967295)
        seed = self.comm.bcast(seed)
    self.attrs['seed'] = seed

    if remove_variance is not None:
        unitary_amplitude = remove_variance
    self.attrs['unitary_amplitude'] = unitary_amplitude
    self.attrs['inverted_phase'] = inverted_phase

    MeshSource.__init__(self, BoxSize=BoxSize, Nmesh=Nmesh, dtype='f4', comm=comm)
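
# Usage sketch (an assumption: this is the constructor of nbodykit's LinearMesh,
# which draws a Gaussian realization of a linear power spectrum; the cosmology,
# redshift, and mesh parameters below are illustrative).
from nbodykit.lab import LinearMesh, cosmology

Plin = cosmology.LinearPower(cosmology.Planck15, redshift=0.55, transfer='EisensteinHu')
mesh = LinearMesh(Plin, BoxSize=1380., Nmesh=64, seed=42)   # seed is broadcast, so all ranks agree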
def __init__(self, source, Nmesh, BoxSize, dtype, selection,
             position, weight, value, interlaced, compensated, resampler):

    from nbodykit.source.catalog import MultipleSpeciesCatalog
    if not isinstance(source, MultipleSpeciesCatalog):
        raise TypeError(("the input source for MultipleSpeciesCatalogMesh "
                         "must be a MultipleSpeciesCatalog"))

    MeshSource.__init__(self, Nmesh=Nmesh, BoxSize=BoxSize,
                        dtype=dtype, comm=source.comm)

    self.source = source
    self.weight = weight
    self.position = position
    self.value = value
    self.selection = selection
    self.interlaced = interlaced
    self.compensated = compensated
    self.resampler = resampler
    self.dtype = dtype
    self.species = source.species
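
# Usage sketch (an assumption: this mesh type is normally created via
# MultipleSpeciesCatalog.to_mesh rather than constructed directly; the catalogs
# below are randomly generated toys).
from nbodykit.lab import UniformCatalog, MultipleSpeciesCatalog

data = UniformCatalog(nbar=1e-4, BoxSize=512., seed=42)
randoms = UniformCatalog(nbar=1e-3, BoxSize=512., seed=84)
both = MultipleSpeciesCatalog(['data', 'randoms'], data, randoms)
mesh = both.to_mesh(Nmesh=64, BoxSize=512., resampler='cic')   # one mesh, both species painted together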
def __init__(self, path, dataset, comm=None, **kwargs):

    self.path = path
    self.dataset = dataset
    self.comm = comm

    # update the meta-data
    self.attrs.update(kwargs)

    with BigFileMPI(comm=self.comm, filename=path)[dataset] as ff:
        for key in ff.attrs:
            v = ff.attrs[key]
            if isinstance(v, string_types) and v.startswith('json://'):
                self.attrs[key] = json.loads(v[7:], cls=JSONDecoder)
            else:
                self.attrs[key] = numpy.squeeze(v)

        # fourier space or config space
        if ff.dtype.kind == 'c':
            self.isfourier = True
            if ff.dtype.itemsize == 16:
                dtype = 'f8'
            else:
                dtype = 'f4'
        else:
            self.isfourier = False
            if ff.dtype.itemsize == 8:
                dtype = 'f8'
            else:
                dtype = 'f4'

    # determine Nmesh and BoxSize from the stored attrs
    if 'ndarray.shape' not in self.attrs:
        raise ValueError("`ndarray.shape` should be stored in the Bigfile `attrs` to determine `Nmesh`")
    if 'Nmesh' not in self.attrs:
        raise ValueError("`Nmesh` should be stored in the Bigfile `attrs` to determine `Nmesh`")

    Nmesh = self.attrs['Nmesh']
    BoxSize = self.attrs['BoxSize']

    MeshSource.__init__(self, BoxSize=BoxSize, Nmesh=Nmesh, dtype=dtype, comm=comm)
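
# Usage sketch (an assumption: this is nbodykit's BigFileMesh constructor, which
# reads back a field previously written with MeshSource.save; the file name and
# dataset are illustrative).
from nbodykit.lab import BigFileMesh

mesh = BigFileMesh('mesh_output.bigfile', dataset='Field')
print(mesh.attrs['Nmesh'], mesh.attrs['BoxSize'])   # recovered from the bigfile attrs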
def __new__(cls, source, BoxSize, Nmesh, dtype, weight, value, selection,
            position='Position', interlaced=False, compensated=False,
            window='cic', **kwargs):

    # source here must be a CatalogSource
    assert isinstance(source, CatalogSourceBase)

    # new, empty CatalogSource
    obj = CatalogSourceBase.__new__(cls, source.comm)

    # copy over size from the CatalogSource
    obj._size = source.size
    obj._csize = source.csize

    # copy over the necessary meta-data to attrs
    obj.attrs['BoxSize'] = BoxSize
    obj.attrs['Nmesh'] = Nmesh
    obj.attrs['interlaced'] = interlaced
    obj.attrs['compensated'] = compensated
    obj.attrs['window'] = window

    # copy meta-data from source too
    obj.attrs.update(source.attrs)

    # store others as straight attributes
    obj.dtype = dtype
    obj.weight = weight
    obj.value = value
    obj.selection = selection
    obj.position = position

    # add in the Mesh Source attributes
    MeshSource.__init__(obj, obj.comm, Nmesh, BoxSize, dtype)

    # finally set the base as the input CatalogSource
    # NOTE: set this AFTER MeshSource.__init__()
    obj.base = source

    return obj
def __init__(self, path, dataset, comm=None, **kwargs):

    self.path = path
    self.dataset = dataset
    self.comm = comm

    # update the meta-data
    self.attrs.update(kwargs)

    with FileMPI(comm=self.comm, filename=path)[dataset] as ff:
        for key in ff.attrs:
            v = ff.attrs[key]
            if isinstance(v, string_types) and v.startswith('json://'):
                self.attrs[key] = json.loads(v[7:], cls=JSONDecoder)
            else:
                self.attrs[key] = numpy.squeeze(v)

        # fourier space or config space
        if ff.dtype.kind == 'c':
            self.isfourier = True
            if ff.dtype.itemsize == 16:
                dtype = 'f8'
            else:
                dtype = 'f4'
        else:
            self.isfourier = False
            if ff.dtype.itemsize == 8:
                dtype = 'f8'
            else:
                dtype = 'f4'

    # determine Nmesh and BoxSize from the stored attrs
    if 'ndarray.shape' not in self.attrs:
        raise ValueError("`ndarray.shape` should be stored in the Bigfile `attrs` to determine `Nmesh`")
    if 'Nmesh' not in self.attrs:
        raise ValueError("`Nmesh` should be stored in the Bigfile `attrs` to determine `Nmesh`")

    Nmesh = self.attrs['Nmesh']
    BoxSize = self.attrs['BoxSize']

    MeshSource.__init__(self, BoxSize=BoxSize, Nmesh=Nmesh, dtype=dtype, comm=comm)
def __init__(self, source, BoxSize, Nmesh, dtype, weight, value, selection,
             position='Position', interlaced=False, compensated=False,
             window='cic', **kwargs):

    # source here must be a CatalogSource
    assert isinstance(source, CatalogSourceBase)

    # copy over the necessary meta-data to attrs
    self.attrs['BoxSize'] = BoxSize
    self.attrs['Nmesh'] = Nmesh
    self.attrs['interlaced'] = interlaced
    self.attrs['compensated'] = compensated
    self.attrs['window'] = window

    # copy meta-data from source too
    self.attrs.update(source.attrs)

    self.source = source

    # store others as straight attributes
    self.dtype = dtype
    self.weight = weight
    self.value = value
    self.selection = selection
    self.position = position

    # add in the Mesh Source attributes
    MeshSource.__init__(self, source.comm, Nmesh, BoxSize, dtype)
def __init__(self, source, Nmesh, BoxSize, Position,
             dtype='f4', resampler='cic', compensated=False, interlaced=False,
             Value=None, Selection=None, Weight=None, **kwargs):

    from nbodykit.base.catalog import CatalogSourceBase
    assert isinstance(source, CatalogSourceBase)

    self._columns = {}

    # copy meta-data from source too
    self.attrs.update(source.attrs)

    MeshSource.__init__(self, source.comm, Nmesh, BoxSize, dtype)

    self.source = source

    # store others as straight attributes
    self.dtype = dtype
    self.Position = Position
    self.Weight = Weight
    self.Value = Value
    self.Selection = Selection

    self.attrs['interlaced'] = interlaced
    self.attrs['compensated'] = compensated
    self.attrs['resampler'] = str(resampler)
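
# Usage sketch (an assumption: this CatalogMesh constructor is normally reached
# through CatalogSource.to_mesh; the catalog and mesh parameters are illustrative).
from nbodykit.lab import UniformCatalog

cat = UniformCatalog(nbar=1e-3, BoxSize=512., seed=42)
mesh = cat.to_mesh(Nmesh=128, resampler='cic', compensated=True, interlaced=True)
delta = mesh.paint(mode='real')   # paint to a RealField with compensation and interlacing applied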
def save(self, output, dataset='Field', mode='real'):
    """
    Save the mesh as a :class:`~nbodykit.source.mesh.bigfile.BigFileMesh`
    on disk, either in real or complex space.

    Parameters
    ----------
    output : str
        name of the bigfile file
    dataset : str, optional
        name of the bigfile data set where the field is stored
    mode : str, optional
        real or complex; the form of the field to store
    """
    return MeshSource.save(self, output, dataset=dataset, mode=mode)
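
# Round-trip sketch (assumptions: `mesh` is any MeshSource instance, e.g. the
# LinearMesh example above, and BigFileMesh reads the result back from disk).
from nbodykit.lab import BigFileMesh

mesh.save('mesh_output.bigfile', dataset='Field', mode='real')
mesh2 = BigFileMesh('mesh_output.bigfile', dataset='Field')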
def __finalize__(self, other):
    """
    Finalize the creation of a CatalogMesh object by copying over
    attributes from a second CatalogMesh.

    This also copies over the relevant MeshSource attributes via a
    call to :func:`MeshSource.__finalize__`.

    Parameters
    ----------
    other : CatalogMesh
        the second CatalogMesh to copy over attributes from
    """
    if isinstance(other, CatalogSourceBase):
        self = CatalogSourceBase.__finalize__(self, other)
    if isinstance(other, MeshSource):
        self = MeshSource.__finalize__(self, other)

    return self
def __init__(self, data, ran, Nmesh, bias=1.0, f=0.0, los=[0, 0, 1],
             R=20, position='Position', revert_rsd_random=False,
             scheme='LGS', BoxSize=None, comm=None):

    assert scheme in ['LGS', 'LF2', 'LRR']
    assert isinstance(data, CatalogSource)
    assert isinstance(ran, CatalogSource)

    from pmesh.pm import ParticleMesh

    if Nmesh is None:
        Nmesh = data.attrs['Nmesh']
    _Nmesh = numpy.empty(3, dtype='i8')
    _Nmesh[...] = Nmesh

    if BoxSize is None:
        BoxSize = data.attrs['BoxSize']

    # normalize the line-of-sight direction to a unit vector
    los = numpy.array(los, dtype='f8', copy=True)
    los /= numpy.sqrt((los ** 2).sum())

    assert len(los) == 3
    assert (~numpy.isnan(los)).all()

    pm = ParticleMesh(BoxSize=BoxSize, Nmesh=_Nmesh, comm=comm)
    self.pm = pm

    if (self.pm.BoxSize / self.pm.Nmesh).max() > R:
        if comm.rank == 0:
            warnings.warn("The smoothing radius is smaller than the mesh cell size. "
                          "This may produce undesired numerical results.")

    assert position in data.columns
    assert position in ran.columns
    self.position = position

    MeshSource.__init__(self, comm, pm.Nmesh.copy(), pm.BoxSize.copy(), pm.dtype)

    self.attrs['bias'] = bias
    self.attrs['f'] = f
    self.attrs['los'] = los
    self.attrs['R'] = R
    self.attrs['scheme'] = scheme
    self.attrs['revert_rsd_random'] = bool(revert_rsd_random)

    self.data = data
    self.ran = ran

    if self.comm.rank == 0:
        self.logger.info("Reconstruction for bias=%g, f=%g, smoothing R=%g los=%s"
                         % (self.attrs['bias'], self.attrs['f'], self.attrs['R'], str(self.attrs['los'])))
        self.logger.info("Reconstruction scheme = %s" % (self.attrs['scheme']))
def __init__(self, data, ran, Nmesh, bias=1.0, f=0.0, los=[0, 0, 1],
             R=20, position='Position', revert_rsd_random=False,
             scheme='LGS', BoxSize=None):

    assert scheme in ['LGS', 'LF2', 'LRR']
    assert isinstance(data, CatalogSource)
    assert isinstance(ran, CatalogSource)

    # both catalogs must live on the same communicator
    comm = data.comm
    assert data.comm == ran.comm

    from pmesh.pm import ParticleMesh

    if Nmesh is None:
        Nmesh = data.attrs['Nmesh']
    _Nmesh = numpy.empty(3, dtype='i8')
    _Nmesh[...] = Nmesh

    if BoxSize is None:
        BoxSize = data.attrs['BoxSize']

    # normalize the line-of-sight direction to a unit vector
    los = numpy.array(los, dtype='f8', copy=True)
    los /= numpy.sqrt((los ** 2).sum())

    assert len(los) == 3
    assert (~numpy.isnan(los)).all()

    pm = ParticleMesh(BoxSize=BoxSize, Nmesh=_Nmesh, comm=comm)
    self.pm = pm

    if (self.pm.BoxSize / self.pm.Nmesh).max() > R:
        if comm.rank == 0:
            warnings.warn("The smoothing radius is smaller than the mesh cell size. "
                          "This may produce undesired numerical results.")

    assert position in data.columns
    assert position in ran.columns
    self.position = position

    MeshSource.__init__(self, comm, pm.Nmesh.copy(), pm.BoxSize.copy(), pm.dtype)

    self.attrs['bias'] = bias
    self.attrs['f'] = f
    self.attrs['los'] = los
    self.attrs['R'] = R
    self.attrs['scheme'] = scheme
    self.attrs['revert_rsd_random'] = bool(revert_rsd_random)

    self.data = data
    self.ran = ran

    if self.comm.rank == 0:
        self.logger.info("Reconstruction for bias=%g, f=%g, smoothing R=%g los=%s"
                         % (self.attrs['bias'], self.attrs['f'], self.attrs['R'], str(self.attrs['los'])))
        self.logger.info("Reconstruction scheme = %s" % (self.attrs['scheme']))
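
# Usage sketch (an assumption: these two constructors correspond to nbodykit's
# FFTRecon standard-reconstruction mesh; the catalogs, bias, and growth rate
# below are toy values, not a physically meaningful configuration).
from nbodykit.lab import UniformCatalog
from nbodykit.algorithms.fftrecon import FFTRecon

data = UniformCatalog(nbar=1e-3, BoxSize=512., seed=42)
randoms = UniformCatalog(nbar=1e-2, BoxSize=512., seed=84)
recon = FFTRecon(data, randoms, Nmesh=64, bias=2.0, f=0.6, R=15., scheme='LGS')
delta_recon = recon.paint(mode='real')   # run the reconstruction and paint the result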