Example #1
    def take_snapshots(self,
                       snapshots,
                       ptype,
                       origin=None,
                       boxsize=None,
                       np=None):
        """ ptype can be a list of ptypes, in which case all particles of the types are loaded into the field """
        if numpy.isscalar(ptype):
            ptypes = [ptype]
        else:
            ptypes = ptype

        ptype = None

        nptypes = len(snapshots[0].C['N'])
        N = numpy.zeros((len(snapshots), nptypes), dtype='i8')
        O = N.copy()

        with sharedmem.TPool(np=np) as pool:

            def work(i):
                snapshot = snapshots[i]
                for ptype in ptypes:
                    mask, count = filter(snapshot, ptype, origin, boxsize)
                    N[i, ptype] = count
                snapshot.clear()

            pool.map(work, range(len(snapshots)))

        O.flat[1:] = N.cumsum()[:-1]
        O = O.reshape(*N.shape)
        self.numpoints = N.sum()
        for comp in self.names:
            shape = list(self[comp].shape)
            shape[0] = self.numpoints
            self[comp] = numpy.zeros(shape, self[comp].dtype)

        with sharedmem.TPool(np=np) as pool:

            def work(i):
                snapshot = snapshots[i]
                for ptype in ptypes:
                    mask, count = filter(snapshot, ptype, origin, boxsize)
                    for block in snapshot.schema:
                        if N[i, ptype] == 0: continue
                        if (ptype, block) not in snapshot: continue
                        if block not in self.names: continue
                        data = select(snapshot, ptype, block, mask)
                        self[block][O[i,
                                      ptype]:O[i, ptype] + N[i, ptype]] = data
                snapshot.clear()

            pool.map(work, range(len(snapshots)))
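
All of these examples follow the same pattern: preallocate a numpy array, then let each thread of a sharedmem.TPool fill a disjoint slice of it in place. A minimal standalone sketch of that pattern, assuming sharedmem.TPool behaves as it is used above (the data and chunk size here are purely illustrative):

import numpy
import sharedmem

data = numpy.random.uniform(size=1024 * 1024)
result = numpy.empty_like(data)
chunksize = 1024 * 64

with sharedmem.TPool(np=4) as pool:
    def work(i):
        # each call handles one disjoint chunk, so in-place writes do not race
        sl = slice(i, i + chunksize)
        result[sl] = data[sl] ** 2
    pool.map(work, range(0, len(data), chunksize))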
Example #2
    def paint2(self,
               ftype,
               color,
               luminosity,
               camera=None,
               kernel=None,
               dtype='f8'):
        """ paint field to CCD, (this paints the tree nodes)
         returns
        C, L where
          C is the color of the pixel
          L is the exposure of the pixel
        Notice that if color is None, 
          C will be undefined,
          L will still be the exposure.
        the return values can be normalized by
        nl_ or n_, then feed to imshow
    """
        raise "Fix this."
        CCD = numpy.zeros(self.shape, dtype=(dtype, 2))

        tree = self.T[ftype]
        if kernel is None: kernel = 'spline'
        color, luminosity = self._getcomponent(ftype, color, luminosity)

        if color is None:
            color = 1.0
        if luminosity is None:
            luminosity = 1.0

        colorp = TreeProperty(tree, color)
        luminosityp = TreeProperty(tree, luminosity)

        for cam in self._mkcameras(camera):
            with sharedmem.TPool(np=self.np) as pool:
                cams = cam.divide(int(pool.np**0.5 * 2), int(pool.np**0.5 * 2))

                def work(cam, offx, offy):
                    mask = cam.prunetree(tree)
                    x, y, z = tree['pos'][mask].T
                    sml = tree['size'][mask][:, 0].copy() * 2
                    luminosity = luminosityp[mask]
                    color = colorp[mask]
                    smallCCD = numpy.zeros(cam.shape, dtype=(dtype, 2))
                    cam.paint(x,
                              y,
                              z,
                              sml,
                              color,
                              luminosity,
                              kernel=kernel,
                              out=smallCCD)
                    CCD[offx:offx + cam.shape[0],
                        offy:offy + cam.shape[1], :] += smallCCD

                pool.starmap(work, cams.reshape(-1, 3))

        C, L = CCD[..., 0], CCD[..., 1]
        C[...] /= L
        return C, L
Example #3
def profile(field, component, center, rmin, rmax, weights=None, logscale=True, nbins=100, density=True, integrated=True):
  """ returns centers, profile, and with of the bins, 
      if density == True, divide by volume 
      if integrated == True, use the sum of full enclose volume. otherwise use the shell"""
  locations = field['locations']
  component, weights = getcomponent(None, field, component, weights)
  if logscale:
    rmin = numpy.log10(rmin)
    rmax = numpy.log10(rmax)
  bins = numpy.linspace(rmin, rmax, nbins + 1, endpoint=True)
  if integrated:
    centers = bins[1:]
  else:
    centers = (bins[:-1] + bins[1:]) * 0.5
  if logscale:
    centers = 10 ** centers
    bins = 10 ** bins


  profil = numpy.zeros(nbins + 2, dtype='f8')
  weight = numpy.zeros(nbins + 2, dtype='f8') # for the profile

   
  with sharedmem.TPool() as pool:
    chunksize = 1024 * 1024
    def work(i):
      sl = slice(i, i + chunksize)
      r = ((locations[sl] - center) ** 2).sum(axis=-1) ** 0.5
      dig = numpy.digitize(r, bins)
      if weights is not None:
        p = numpy.bincount(dig, weights=component[sl] * weights[sl], minlength=nbins+2)
        w = numpy.bincount(dig, weights=weights[sl], minlength=nbins+2)
        return p, w
      else:
        p = numpy.bincount(dig, weights=component[sl], minlength=nbins+2)
        return p, None
    def reduce(p, w):
      if weights is not None:
        profil[:] += p
        weight[:] += w
      else:
        profil[:] += p
    pool.map(work, range(0, len(locations), chunksize), reduce=reduce)
  if integrated:
    profil = profil.cumsum()
    weight = weight.cumsum()

  if weights is not None:
    profil /= weight

  if density:
    if integrated:
      profil[1:-1] /= 4 / 3.0 * 3.1416 * bins[1:] ** 3
    else:
      profil[1:-1] /= numpy.diff(4 / 3.0 * 3.1416 * bins ** 3)

  return centers, profil[1:-1], numpy.diff(bins)[:-1]
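
A minimal usage sketch, assuming a hypothetical field object that provides 'locations' and a 'mass' component (the component name, center, and radii are illustrative, not from the source):

# hypothetical call: cumulative mass density profile in 50 logarithmic bins
centers, prof, widths = profile(field, 'mass', center=[250.0, 250.0, 250.0],
                                rmin=0.1, rmax=100.0, nbins=50,
                                logscale=True, density=True, integrated=True)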
Example #4
    def read(self, ftypes, fids=None, np=None):
        """ read the field from given list of fids, 
        using at most np threads to read. (default is self.np)
        0 is serial.
        if fids is None, the cut defined in 'use' will be used.

        returns the field of this ftype.

        build the tree if the schema says so.

    """
        if self.need_cut:
            if fids is None and self.map is not None:
                fids = self.map.cut(self.origin, self.boxsize)

        if np is None:
            np = self.np

        if fids is not None:
            snapnames = [self.snapname % i for i in fids]
        elif '%d' in self.snapname:
            snapnames = [self.snapname % i for i in range(self.C['Nfiles'])]
        else:
            snapnames = [self.snapname]

        def getsnap(snapname):
            try:
                return Snapshot(snapname, self.format, template=self._template)
            except IOError as e:
                warnings.warn('file %s skipped for %s' % (snapname, str(e)))
            return None

        with sharedmem.TPool(np=np) as pool:
            # drop files that failed to open (getsnap returned None);
            # use a list so len() and indexing work under Python 3 as well
            snapshots = [s for s in pool.map(getsnap, snapnames)
                         if s is not None]

        rt = []
        for ftype in _ensurelist(ftypes):
            if self.need_cut:
                self.F[ftype].take_snapshots(snapshots,
                                             ptype=self.P[ftype],
                                             boxsize=self.boxsize,
                                             origin=self.origin,
                                             np=np)
            else:
                self.F[ftype].take_snapshots(snapshots,
                                             ptype=self.P[ftype],
                                             np=np)
            self.buildtree(ftype)
            rt += [self[ftype]]

        if numpy.isscalar(ftypes):
            return rt[0]
        else:
            return rt
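
A hedged usage sketch, assuming sim is an instance of the class defining read() and 'gas' is a configured ftype (both names are illustrative):

# read files 0..2 of the hypothetical 'gas' ftype with 4 reader threads
gas = sim.read('gas', fids=[0, 1, 2], np=4)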
Example #5
    def dump_snapshots(self,
                       snapshots,
                       ptype,
                       save_and_clear=False,
                       C=None,
                       np=None):
        """ dump field into snapshots.
        if save_and_clear is True, immediately save the file and clear the snapshot object,
        using less memory.
        otherwise, leave the data in memory in snapshot object. and only the header is written.
        C is the template used for the snapshot headers.
    """
        Nfile = len(snapshots)
        starts = numpy.zeros(dtype='u8', shape=Nfile)
        for i in range(Nfile):
            snapshot = snapshots[i]
            if C is not None:
                snapshot.C[...] = C
            # integer division keeps the per-file particle counts integral
            starts[i] = self.numpoints * i // Nfile
            snapshot.C['N'][ptype] = (self.numpoints * (i + 1) // Nfile -
                                      self.numpoints * i // Nfile)
            tmp = snapshot.C['Ntot']
            tmp[ptype] = self.numpoints
            snapshot.C['Ntot'] = tmp
            snapshot.C['Nfiles'] = Nfile
        skipped_comps = set([])

        def work(i):
            snapshot = snapshots[i]
            if save_and_clear:
                snapshot.create_structure()

            for comp in self.names:
                try:
                    dtype = snapshot.reader[comp].dtype
                except KeyError:
                    # skip if the reader doesn't save this block
                    skipped_comps.add(comp)
                    continue
                snapshot[ptype, comp] = numpy.array(
                    self[comp][starts[i]:starts[i] + snapshot.C['N'][ptype]],
                    dtype=dtype.base,
                    copy=False)

                if save_and_clear:
                    snapshot.save(comp, ptype)
                    snapshot.clear(comp, ptype)

        with sharedmem.TPool(np=np) as pool:
            pool.map(work, list(range(Nfile)))

        if skipped_comps:
            warnings.warn('blocks not supported in snapshot: %s' %
                          str(skipped_comps))
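
A hedged usage sketch, assuming field is an instance of the class defining dump_snapshots() and snapshots is a list of writable Snapshot objects created elsewhere (all names are illustrative):

# write the field as particle type 0 across the prepared snapshot files,
# saving and clearing each block immediately to limit memory use
field.dump_snapshots(snapshots, ptype=0, save_and_clear=True, np=4)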
Example #6
    def select(self, ftype, sml=0, camera=None):
        """ return a mask whether particles are in the camera """
        locations, = self._getcomponent(ftype, 'locations')
        x, y, z = locations.T
        if numpy.isscalar(sml):
            # the default sml=0 is a scalar; expand it so sml[sl] below works
            sml = numpy.repeat(numpy.float32(sml), len(x))
        mask = numpy.zeros(len(x), dtype='?')
        for cam in self._mkcameras(camera):
            with sharedmem.TPool(np=self.np) as pool:
                chunksize = 1024 * 1024

                def work(i):
                    sl = slice(i, i + chunksize)
                    mask[sl] |= (cam.mask(x[sl], y[sl], z[sl], sml[sl]) != 0)

                pool.map(work, range(0, len(mask), chunksize))
        return mask
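
A hedged usage sketch, assuming sim defines select() and gas is the corresponding field with a per-particle 'sml' component (names are illustrative):

# mask of gas particles that fall inside the configured camera(s)
mask = sim.select('gas', sml=gas['sml'])
visible = gas['locations'][mask]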
Example #7
    def unfold(self, M, ftype=None, center=None):
        """ unfold the field position by transformation M
        the field shall be periodic. M is an
        list of column integer vectors of the shearing
        vectors. abs(det(M)) = 1
        the field has to be in a cubic box located from (0,0,0)
    """
        assert self.periodic
        from gaepsi.compiledbase.geometry import Rotation, Cubenoid
        if center is None:
            cub = Cubenoid(M, self.origin, self.boxsize, center=0, neworigin=0)
        else:
            cub = Cubenoid(M, self.origin, self.boxsize, center=center)

        self.boxsize[...] = cub.newboxsize
        self.origin[...] = cub.neworigin
        self.periodic = False

        if ftype is None: ftypes = self.F.keys()
        else: ftypes = _ensurelist(ftype)
        for ftype in ftypes:
            locations, = self._getcomponent(ftype, 'locations')
            x, y, z = locations.T
            with sharedmem.TPool(np=self.np) as pool:
                chunksize = 1024 * 1024

                def work(i):
                    sl = slice(i, i + chunksize)
                    rt = cub.apply(x[sl], y[sl], z[sl])
                    return (rt < 0).sum()

                badness = numpy.sum(pool.map(work, range(0, len(x),
                                                         chunksize)))
            if badness > 0:
                warnings.warn("some %d points are outside the box" % badness)
        for ftype in ftypes:
            if ftype in self.T and self.T[ftype] is not False:
                self.buildtree(ftype)
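
A hedged usage sketch, assuming sim defines unfold() and 'gas' is a loaded, periodic ftype; the shearing matrix here is an illustrative unimodular example:

# |det(M)| == 1, as the docstring requires
M = numpy.array([[1, 0, 0],
                 [1, 1, 0],
                 [0, 0, 1]])
sim.unfold(M, ftype='gas')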
Example #8
    def makeT(self, ftype, Xh=0.76, halo=False):
        """T will be in Kelvin"""
        gas = self.F[ftype]

        gas['T'] = numpy.empty(dtype='f4', shape=gas.numpoints)
        with sharedmem.TPool(np=self.np) as pool:
            chunksize = 1024 * 1024

            def work(i):
                sl = slice(i, i + chunksize)
                if halo:
                    gas['T'][sl] = gas['vel'][sl, 0]**2
                    gas['T'][sl] += gas['vel'][sl, 1]**2
                    gas['T'][sl] += gas['vel'][sl, 2]**2
                    gas['T'][sl] *= 0.5
                    gas['T'][sl] *= self.U.TEMPERATURE
                else:
                    self.cosmology.ie2T(ie=gas['ie'][sl],
                                        ye=gas['ye'][sl],
                                        Xh=Xh,
                                        out=gas['T'][sl])
                    gas['T'][sl] *= self.U.TEMPERATURE

            pool.map(work, range(0, len(gas['T']), chunksize))
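
A hedged usage sketch, assuming sim defines makeT() and the 'gas' field carries the 'ie' and 'ye' blocks that the non-halo branch needs (names are illustrative):

sim.makeT('gas', Xh=0.76)   # fills gas['T'] with temperatures in Kelvin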
Example #9
def qlf(cosmology, band, magnitude=False, returnraw=False):
  """ returns an scipy interpolated function for the hopkins 2007 QLF, 
      at a given band.
      band can be a frequency, or 'bol', 'blue', 'ir', 'soft', 'hard'.
      HOPKINS2007 cosmology is implied.
      result shall not depend on the input cosmology but the HOPKINS cosmology
      is implied in the fits.
      if return raw is true, return 
         zrange, Lbol, Lband, M_AB, S_nu, Phi
  """
  zbins = numpy.linspace(0, 6, 200)
  Lbolbins=numpy.linspace(8, 18, 300)

  U = cosmology.units
  banddict = {'bol':0, 'blue':-1, 'ir':-2, 'soft':-3, 'hard': -4}
  from scipy.interpolate import RectBivariateSpline
  if band in banddict:
      band = banddict[band]
  else:
      band = _bandconv(U, band, hertz=True)
  key = band

  if key not in _qlf_interp:
    v = numpy.empty(numpy.broadcast(Lbolbins[None, :], zbins[:, None]).shape)
    Lband, M_AB, S_nu, Phi = _qlf.qlf(band, 1.0, Lbolbins)
    with sharedmem.TPool() as pool:
      def work(v, z):
        Lband_, M_AB_, S_nu, Phi = _qlf.qlf(band, z, Lbolbins)
        v[:] = Phi
      pool.starmap(work, zip(v, zbins))
    v /= (U.MPC ** 3)
    # Hopkins used 3.9e33 erg/s for Lsun while we use a different value,
    # but his internal fits assume his number, so we skip the conversion
    # here to keep the fits consistent with luminosity in units of Lsun:
    # Lbol = Lbol - numpy.log10(U.SOLARLUMINOSITY/U.ERG*U.SECOND) + numpy.log10(3.9e33)
    data = numpy.empty(shape=len(Lbolbins),
        dtype=[
          ('Lbol', 'f4'),
          ('Lband', 'f4'),
          ('M_AB', 'f4'),
          ('S_nu', 'f4'),
          ('Phi', ('f4', v.shape[0]))])
    data['Lbol'] = Lbolbins
    data['Lband'] = Lband
    data['M_AB'] = M_AB
    data['S_nu'] = S_nu
    data['Phi'] = v.T
    _qlf_interp[key] = data
  data = _qlf_interp[key]
  if returnraw:
    return data.view(numpy.recarray)
  if magnitude:
    func = RectBivariateSpline(zbins, - data['M_AB'], data['Phi'].T)
    func.x = zbins
    func.y = -data['M_AB']
    func.z = data['Phi'].T
    return func
  else:
    func = RectBivariateSpline(zbins, data['Lband'], data['Phi'].T)
    func.x = zbins
    func.y = data['Lband']
    func.z = data['Phi'].T
    return func
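
A hedged usage sketch, assuming a cosmology object compatible with this module; the redshift and luminosity grid below are illustrative:

import numpy

phi = qlf(cosmology, 'bol')
# evaluate the spline at z = 1 over a grid of band luminosities
# (in whatever units the underlying fit returns for Lband)
print(phi(1.0, numpy.linspace(9.0, 14.0, 6)))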