Example #1
from nbodykit.transform import ConstantArray

def test_constarray(comm):
    # scalar value: a 1D constant array of the requested size
    a = ConstantArray(1.0, 1, chunks=1000)
    assert len(a) == 1
    assert a.shape == (1,)
    # vector value: broadcast along the first axis
    a = ConstantArray([1.0, 1.0], 1, chunks=1000)
    assert a.shape == (1, 2)

    a = ConstantArray([1.0, 1.0], 3, chunks=1000)
    assert a.shape == (3, 2)
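
A minimal usage sketch to accompany the test above (not part of the source): ConstantArray returns a chunked dask array, so its values are only materialized when .compute() is called.

from nbodykit.transform import ConstantArray

# scalar value: a 1D constant array of the requested size
a = ConstantArray(1.0, 5, chunks=1000)
print(a.shape)       # (5,)
print(a.compute())   # [1. 1. 1. 1. 1.]

# vector value: broadcast along the first axis
b = ConstantArray([1.0, 2.0], 3, chunks=1000)
print(b.compute())   # [[1. 2.] [1. 2.] [1. 2.]]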
Example #2
    def run(self):
        """
        Run the algorithm, which computes the histogram. This function
        does not return anything, but adds the following attributes
        to the class:

        - :attr:`bin_edges`
        - :attr:`bin_centers`
        - :attr:`dV`
        - :attr:`nbar`

        .. note::
            All ranks store the same result attributes.

        Attributes
        ----------
        bin_edges : array_like
            the edges of the redshift bins
        bin_centers : array_like
            the center values of each redshift bin
        dV : array_like
            the volume of each redshift shell in units of :math:`(\mathrm{Mpc}/h)^3`
        nbar : array_like
            the values of the redshift histogram, normalized to
            number density (in units of :math:`(\mathrm{Mpc}/h)^{-3}`)
        """
        edges = self.attrs['edges']

        # get the columns
        redshift = self.source[self.attrs['redshift']]
        if self.attrs['weight'] is not None:
            weight = self.source[self.attrs['weight']]
            if self.comm.rank == 0:
                self.logger.info("computing histogram using weights from '%s' column" %self.attrs['weight'])
        else:
            weight = ConstantArray(1.0, self.source.size)

        # compute the numpy arrays from dask
        redshift, weight = self.source.compute(redshift, weight)

        # do the bin count, using the specified weight values
        dig = numpy.searchsorted(edges, redshift, side="right")
        N = numpy.bincount(dig, weights=weight, minlength=len(edges)+1)[1:-1]

        # now sum across all ranks
        N = self.comm.allreduce(N)

        # compute the volume
        if self.comm.rank == 0:
            self.logger.info("using cosmology %s to compute volume in units of (Mpc/h)^3" %str(self.cosmo))
            self.logger.info("sky fraction used in volume calculation: %.4f" %self.attrs['fsky'])
        R_hi = self.cosmo.comoving_distance(edges[1:]) # in Mpc/h
        R_lo = self.cosmo.comoving_distance(edges[:-1]) # in Mpc/h
        dV   = (4./3.)*numpy.pi*(R_hi**3 - R_lo**3) * self.attrs['fsky']

        # store the results
        self.bin_edges   = edges
        self.bin_centers = 0.5*(edges[:-1] + edges[1:])
        self.dV          = dV
        self.nbar        = 1.*N/dV
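
A hedged usage sketch: the method above matches nbodykit's RedshiftHistogram algorithm, whose constructor runs the computation; the catalog and redshift column below are illustrative.

from nbodykit.lab import RandomCatalog, RedshiftHistogram, cosmology

cat = RandomCatalog(csize=10000, seed=42)
cat['Redshift'] = cat.rng.uniform(low=0.1, high=1.0)   # illustrative redshifts

zhist = RedshiftHistogram(cat, fsky=1.0, cosmo=cosmology.Planck15,
                          redshift='Redshift')
# run() stored the same result attributes on every rank
print(zhist.bin_centers, zhist.nbar)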
Example #3
    def Selection(self):
        """
        A boolean column that selects a subset slice of the CatalogSource.

        By default, this column is set to ``True`` for all particles, and
        all CatalogSource objects will contain this column.
        """
        return ConstantArray(True, self.size, chunks=_global_options['dask_chunk_size'])
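
A short sketch of overriding the default (catalog and threshold are illustrative): Selection accepts any boolean dask expression, and only objects with Selection equal to True are painted when the catalog is converted to a mesh.

from nbodykit.lab import UniformCatalog

cat = UniformCatalog(nbar=100, BoxSize=1.0, seed=42)
# keep only particles in the lower half of the box along x
cat['Selection'] = cat['Position'][:, 0] < 0.5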
Example #4
    def Weight(self):
        """
        The column giving the weight to use for each particle on the mesh.

        The mesh field is a weighted average of ``Value``, with the weights
        given by ``Weight``.

        By default, this array is set to unity for all particles, and
        all CatalogSource objects will contain this column.
        """
        return ConstantArray(1.0, self.size, chunks=_global_options['dask_chunk_size'])
Example #5
    def Value(self):
        """
        When interpolating a CatalogSource on to a mesh, the value of this
        array is used as the Value that each particle contributes to a given
        mesh cell.

        The mesh field is a weighted average of ``Value``, with the weights
        given by ``Weight``.

        By default, this array is set to unity for all particles, and
        all CatalogSource objects will contain this column.
        """
        return ConstantArray(1.0, self.size, chunks=_global_options['dask_chunk_size'])
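
Weight and Value together control what is painted: per the docstrings above, the mesh field is the weighted average of Value with weights given by Weight. A hedged sketch (the column choice is illustrative) that paints a velocity-weighted field:

from nbodykit.lab import UniformCatalog

cat = UniformCatalog(nbar=100, BoxSize=1.0, seed=42)
cat['Value'] = cat['Velocity'][:, 0]   # per-particle field value
mesh = cat.to_mesh(Nmesh=64)           # paints the weighted average of Value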
Example #6
    def to_catalog(self, **kwargs):
        from nbodykit.lab import ArrayCatalog
        from nbodykit.transform import ConstantArray
        Omega = self.Omega(self.a['S'])
        source = ArrayCatalog(
            {
                'Position': self.X,
                'Velocity': self.V,
                'Weight': ConstantArray(Omega, len(self.X))
            },
            BoxSize=self.BoxSize,
            Omega=Omega,
            Omega0=self.Omega(1.0),
            Time=self.a['S'],
            comm=self.comm,
            **kwargs)
        return source
Example #7
def to_catalog(STATE, **kwargs):
    from nbodykit.lab import ArrayCatalog
    from nbodykit.transform import ConstantArray
    Omega = 0.27  # hard-coded; stands in for self.Omega(self.a['S']) in Example #6
    source = ArrayCatalog(
        {
            'Position': STATE[0, 0],
            'Velocity': STATE[1, 0],
            'Weight': ConstantArray(Omega, len(STATE[0, 0]))
        },
        BoxSize=[64, 64, 64],
        Nmesh=[64, 64, 64],
        Omega=Omega,
        Omega0=0.27,
        Time=0.228,
        **kwargs)
    return source
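
A hedged driver for the module-level variant above, using a made-up STATE array with the layout the function indexes (STATE[0, 0] holds positions, STATE[1, 0] velocities):

import numpy

npart = 8
STATE = numpy.zeros((2, 1, npart, 3))
STATE[0, 0] = numpy.random.uniform(0, 64, size=(npart, 3))  # positions inside the box
cat = to_catalog(STATE)
print(cat.columns)   # includes 'Position', 'Velocity', 'Weight'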
Example #8
    def __setitem__(self, col, value):
        """
        Add columns to the CatalogSource, overriding any existing columns
        with the name ``col``.
        """
        # handle scalar values
        if numpy.isscalar(value):
            assert self.size is not NotImplemented, "size is not implemented! cannot set scalar array"
            value = ConstantArray(value, self.size, chunks=_global_options['dask_chunk_size'])

        # check the correct size, if we know the size
        if self.size is not NotImplemented:
            args = (col, self.size, len(value))
            msg = "error setting '%s' column, data must be array of size %d, not %d" % args
            assert len(value) == self.size, msg

        # call the base __setitem__
        CatalogSourceBase.__setitem__(self, col, value)
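
A minimal sketch of the scalar path above (the catalog choice is illustrative): assigning a scalar to a column broadcasts it across the catalog via ConstantArray.

from nbodykit.lab import UniformCatalog

cat = UniformCatalog(nbar=100, BoxSize=1.0, seed=42)
cat['Weight'] = 2.0                  # scalar --> ConstantArray of length cat.size
print(cat['Weight'].compute()[:3])   # [2. 2. 2.]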