Example 1
    def test_halo_catalog_boundary_particles(self):
        rs = np.random.RandomState(3670474)
        n_halos = 100
        fields = [
            "particle_%s" % name
            for name in ["mass"] + ["position_%s" % ax for ax in "xyz"]
        ]
        units = ["g"] + ["cm"] * 3
        data = dict((field, YTArray(rs.random_sample(n_halos), unit))
                    for field, unit in zip(fields, units))

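        # Pin a few particles exactly to the domain boundaries (0 and 1 in
        # code units) to exercise how edge particles are handled on read.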
        data["particle_position_x"][0] = 1.0
        data["particle_position_x"][1] = 0.0
        data["particle_position_y"][2] = 1.0
        data["particle_position_y"][3] = 0.0
        data["particle_position_z"][4] = 1.0
        data["particle_position_z"][5] = 0.0

        fn = fake_halo_catalog(data)
        ds = yt_load(fn)

        assert isinstance(ds, HaloCatalogDataset)

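        # Positions pinned to the domain edges may be wrapped on read, so
        # only the mass field is compared here.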
        for field in ["particle_mass"]:
            f1 = data[field].in_base()
            f1.sort()
            f2 = ds.r[field].in_base()
            f2.sort()
            assert_array_equal(f1, f2)
Example 2
    def _load_ds(self, filename, **kwargs):
        """
        Load a catalog as a yt dataset and call the setup
        function, if one is set.
        """
        ds = yt_load(filename, **kwargs)
        if self.setup_function is not None:
            self.setup_function(ds)
        return ds
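A minimal sketch of how the setup_function hook might be supplied. The TreeFarm class name, its import path, and the setup_function keyword are assumptions based on ytree's merger-tree interface, not confirmed by this excerpt:

    import yt
    from ytree.tree_farm import TreeFarm  # assumed import path

    def my_setup(ds):
        # Runs once per catalog as _load_ds loads it.
        print("loaded %s at z = %.2f" % (ds, ds.current_redshift))

    # Hypothetical series of halo catalogs; adjust the glob to your data.
    ts = yt.DatasetSeries("halo_catalogs/catalog_*.h5")
    farm = TreeFarm(ts, setup_function=my_setup)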
Example 3
    def test_halo_catalog(self):
        rs = np.random.RandomState(3670474)
        n_halos = 100
        fields = ['particle_%s' % name for name in
                  ['mass'] + ['position_%s' % ax for ax in 'xyz']]
        units = ['g'] + ['cm']*3
        data = dict((field, YTArray(rs.random_sample(n_halos), unit))
                    for field, unit in zip(fields, units))

        fn = fake_halo_catalog(data)
        ds = yt_load(fn)

        assert isinstance(ds, HaloCatalogDataset)

        for field in fields:
            f1 = data[field].in_base()
            f1.sort()
            f2 = ds.r[field].in_base()
            f2.sort()
            assert_array_equal(f1, f2)
Example 4
    def test_halo_catalog(self):
        rs = np.random.RandomState(3670474)
        n_halos = 100
        fields = [
            f"particle_{name}" for name in ["mass"] + [f"position_{ax}" for ax in "xyz"]
        ]
        units = ["g"] + ["cm"] * 3
        data = dict(
            (field, YTArray(rs.random_sample(n_halos), unit))
            for field, unit in zip(fields, units)
        )

        fn = fake_halo_catalog(data)
        ds = yt_load(fn)

        assert isinstance(ds, HaloCatalogDataset)

        for field in fields:
            f1 = data[field].in_base()
            f1.sort()
            f2 = ds.r[field].in_base()
            f2.sort()
            assert_array_equal(f1, f2)
Example 5
    def trace_ancestors(self, halo_type, root_ids, fields=None, filename=None):
        """
        Trace the ancestry of a given set of halos.

        A merger-tree for a specific set of halos will be created,
        starting with the last halo catalog and moving backward.

        Parameters
        ----------
        halo_type : string
            The type of halo, typically "FOF" for FoF groups or
            "Subfind" for subhalos.
        root_ids : integer or array of integers
            The halo IDs from the last halo catalog for the
            targeted halos.
        fields : list of strings, optional
            List of additional fields to be saved to halo catalogs.
        filename : string, optional
            Directory in which merger-tree catalogs will be saved.
        """

        # "filename" is optional, so guard against os.path.dirname(None).
        output_dir = os.path.dirname(filename) if filename else ""
        if self.comm.rank == 0 and len(output_dir) > 0:
            ensure_dir(output_dir)

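        # Process catalogs from the final output backward in time; in each
        # pair, fn1 is the later (descendent) snapshot and fn2 the earlier
        # (ancestor) one.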
        all_outputs = self.ts.outputs[::-1]
        ds1 = None

        for i, fn2 in enumerate(all_outputs[1:]):
            fn1 = all_outputs[i]
            target_filename = get_output_filename(
                filename, "%s.%d" % (_get_tree_basename(fn1), 0), ".h5")
            catalog_filename = get_output_filename(
                filename, "%s.%d" % (_get_tree_basename(fn2), 0), ".h5")
            if os.path.exists(catalog_filename):
                continue

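            # ds1 (the later catalog) is loaded only on the first pass;
            # afterwards it is carried over from the previous iteration
            # (ds1 = ds2 at the bottom of the loop).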
            if ds1 is None:
                ds1 = self._load_ds(fn1, index_ptype=halo_type)
            ds2 = self._load_ds(fn2, index_ptype=halo_type)

            if self.comm.rank == 0:
                _print_link_info(ds1, ds2)

            if ds2.index.particle_count[halo_type] == 0:
                mylog.info("%s has no halos of type %s, ending.",
                           ds2, halo_type)
                break

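            # First pass: sanitize the caller-supplied root IDs into a
            # plain int64 array. Later passes: read the target IDs from
            # the catalog written on the previous iteration.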
            if i == 0:
                target_ids = root_ids
                if not iterable(target_ids):
                    target_ids = np.array([target_ids])
                if isinstance(target_ids, YTArray):
                    target_ids = target_ids.d
                if target_ids.dtype != np.int64:
                    target_ids = target_ids.astype(np.int64)
            else:
                mylog.info("Loading target ids from %s.", target_filename)
                ds_target = yt_load(target_filename)
                target_ids = ds_target.r[
                    "halos", "particle_identifier"].d.astype(np.int64)
                del ds_target

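            # Link each target halo in ds1 to its ancestors in ds2,
            # distributing the targets across parallel workers.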
            id_store = []
            target_halos = []
            ancestor_halos = []

            njobs = min(self.comm.size, target_ids.size)
            pbar = get_pbar("Linking halos", target_ids.size, parallel=True)
            my_i = 0
            for halo_id in parallel_objects(target_ids, njobs=njobs):
                my_halo = ds1.halo(halo_type, halo_id)

                target_halos.append(my_halo)
                my_ancestors = self._find_ancestors(my_halo,
                                                    ds2,
                                                    id_store=id_store)
                ancestor_halos.extend(my_ancestors)
                my_i += njobs
                pbar.update(my_i)
            pbar.finish()

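            # On the first pass, also write the root catalog: root halos
            # have no descendent, marked with an identifier of -1.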
            if i == 0:
                for halo in target_halos:
                    halo.descendent_identifier = -1
                self._save_catalog(filename, ds1, target_halos, fields)
            self._save_catalog(filename, ds2, ancestor_halos, fields)

            if len(ancestor_halos) == 0:
                break

            ds1 = ds2
            clear_id_cache()
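A sketch of how trace_ancestors might be driven, continuing the TreeFarm assumptions above; the halo type, root ID, and output directory are illustrative only:

    # Trace the ancestry of halo 0 from the final catalog backward in time,
    # saving one merger-tree catalog per snapshot under "merger_tree/".
    farm.trace_ancestors("FOF", 0,
                         fields=["particle_mass"],
                         filename="merger_tree/")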