def trace_descendents(self, halo_type, fields=None, filename=None):
    """
    Trace the descendents of all halos.

    A merger-tree for all halos will be created, starting
    with the first halo catalog and moving forward.

    Parameters
    ----------
    halo_type : string
        The type of halo, typically "FOF" for FoF groups or
        "Subfind" for subhalos.
    fields : optional, list of strings
        List of additional fields to be saved to halo catalogs.
    filename : optional, string
        Directory in which merger-tree catalogs will be saved.
    """

    output_dir = os.path.dirname(filename)
    if self.comm.rank == 0 and len(output_dir) > 0:
        ensure_dir(output_dir)

    all_outputs = self.ts.outputs[:]
    # Linking works on consecutive pairs of catalogs; with fewer than
    # two outputs there is nothing to link.  (Previously this case fell
    # through to the post-loop code and raised a NameError because fn2
    # and catalog_filename were never bound.)
    if len(all_outputs) < 2:
        return

    ds1 = ds2 = None

    for i, fn2 in enumerate(all_outputs[1:]):
        fn1 = all_outputs[i]
        target_filename = get_output_filename(
            filename, "%s.%d" % (_get_tree_basename(fn1), 0), ".h5")
        catalog_filename = get_output_filename(
            filename, "%s.%d" % (_get_tree_basename(fn2), 0), ".h5")
        # Skip pairs whose linked catalog has already been written
        # (allows restarting an interrupted run).
        if os.path.exists(target_filename):
            continue

        if ds1 is None:
            ds1 = self._load_ds(fn1, index_ptype=halo_type)
        ds2 = self._load_ds(fn2, index_ptype=halo_type)

        if self.comm.rank == 0:
            _print_link_info(ds1, ds2)

        target_halos = []
        if ds1.index.particle_count[halo_type] == 0:
            # No halos in this catalog: save an empty catalog and
            # advance to the next pair.
            self._save_catalog(filename, ds1, target_halos, fields)
            ds1 = ds2
            continue

        target_ids = \
          ds1.r[halo_type, "particle_identifier"].d.astype(np.int64)

        njobs = min(self.comm.size, target_ids.size)
        pbar = get_pbar("Linking halos", target_ids.size, parallel=True)
        my_i = 0
        for halo_id in parallel_objects(target_ids, njobs=njobs):
            my_halo = ds1.halo(halo_type, halo_id)

            target_halos.append(my_halo)
            # Attach descendent information for this halo from ds2.
            self._find_descendent(my_halo, ds2)
            my_i += njobs
            pbar.update(my_i)
        pbar.finish()

        self._save_catalog(filename, ds1, target_halos, fields)
        ds1 = ds2
        clear_id_cache()

    # Save the catalog for the final output unless it already exists.
    if os.path.exists(catalog_filename):
        return

    if ds2 is None:
        ds2 = self._load_ds(fn2, index_ptype=halo_type)
    if self.comm.rank == 0:
        # Passing halo_type here saves all halos of that type from ds2
        # (the final catalog has no further descendents to link).
        self._save_catalog(filename, ds2, halo_type, fields)
def trace_ancestors(self, halo_type, root_ids, fields=None, filename=None):
    """
    Trace the ancestry of a given set of halos.

    A merger-tree for a specific set of halos will be created,
    starting with the last halo catalog and moving backward.

    Parameters
    ----------
    halo_type : string
        The type of halo, typically "FOF" for FoF groups or
        "Subfind" for subhalos.
    root_ids : integer or array of integers
        The halo IDs from the last halo catalog for the
        targeted halos.
    fields : optional, list of strings
        List of additional fields to be saved to halo catalogs.
    filename : optional, string
        Directory in which merger-tree catalogs will be saved.
    """

    output_dir = os.path.dirname(filename)
    if self.comm.rank == 0 and len(output_dir) > 0:
        ensure_dir(output_dir)

    # Walk the outputs from last to first.
    all_outputs = self.ts.outputs[::-1]
    ds1 = None

    for i, fn2 in enumerate(all_outputs[1:]):
        fn1 = all_outputs[i]
        target_filename = get_output_filename(
            filename, "%s.%d" % (_get_tree_basename(fn1), 0), ".h5")
        catalog_filename = get_output_filename(
            filename, "%s.%d" % (_get_tree_basename(fn2), 0), ".h5")
        # Skip pairs whose ancestor catalog has already been written
        # (allows restarting an interrupted run).
        if os.path.exists(catalog_filename):
            continue

        if ds1 is None:
            ds1 = self._load_ds(fn1, index_ptype=halo_type)
        ds2 = self._load_ds(fn2, index_ptype=halo_type)

        if self.comm.rank == 0:
            _print_link_info(ds1, ds2)

        if ds2.index.particle_count[halo_type] == 0:
            # Use lazy %-style args (consistent with the other
            # mylog.info call below) instead of eager formatting.
            mylog.info("%s has no halos of type %s, ending.",
                       ds2, halo_type)
            break

        if i == 0:
            # Normalize root_ids into a plain int64 ndarray.  Accepts
            # a scalar, list/tuple, ndarray, or YTArray; previously a
            # plain list reached the .dtype access and raised
            # AttributeError despite being documented as valid input.
            target_ids = root_ids
            if not iterable(target_ids):
                target_ids = np.array([target_ids])
            if isinstance(target_ids, YTArray):
                target_ids = target_ids.d
            target_ids = np.asarray(target_ids)
            if target_ids.dtype != np.int64:
                target_ids = target_ids.astype(np.int64)
        else:
            # Subsequent iterations read the targets from the catalog
            # written for the previous (later-time) output.
            mylog.info("Loading target ids from %s.", target_filename)
            ds_target = yt_load(target_filename)
            target_ids = \
              ds_target.r["halos",
                          "particle_identifier"].d.astype(np.int64)
            del ds_target

        id_store = []
        target_halos = []
        ancestor_halos = []

        njobs = min(self.comm.size, target_ids.size)
        pbar = get_pbar("Linking halos", target_ids.size, parallel=True)
        my_i = 0
        for halo_id in parallel_objects(target_ids, njobs=njobs):
            my_halo = ds1.halo(halo_type, halo_id)

            target_halos.append(my_halo)
            my_ancestors = self._find_ancestors(my_halo, ds2,
                                                id_store=id_store)
            ancestor_halos.extend(my_ancestors)
            my_i += njobs
            pbar.update(my_i)
        pbar.finish()

        if i == 0:
            # The root halos are at the end of the simulation and thus
            # have no descendents.
            for halo in target_halos:
                halo.descendent_identifier = -1
            self._save_catalog(filename, ds1, target_halos, fields)
        self._save_catalog(filename, ds2, ancestor_halos, fields)
        # Stop once no ancestors remain to trace further back.
        if len(ancestor_halos) == 0:
            break

        ds1 = ds2
        clear_id_cache()