def _initialize_particle_handler(self):
    """Build the particle index: count particles, then construct the
    octree container and the coarse region finder over all data files.

    Assumes ``self.index_ptype``, ``self.data_files`` and ``self.dataset``
    have been populated by the setup helpers called below.
    """
    self._setup_data_io()
    self._setup_filenames()
    # Restrict the particle count to the index particle type unless the
    # special value "all" is requested.
    index_ptype = self.index_ptype
    if index_ptype == "all":
        self.total_particles = sum(
            sum(df.total_particles.values()) for df in self.data_files)
    else:
        self.total_particles = sum(
            df.total_particles[index_ptype] for df in self.data_files)
    ds = self.dataset
    self.oct_handler = ParticleOctreeContainer(
        [1, 1, 1], ds.domain_left_edge, ds.domain_right_edge,
        over_refine=ds.over_refine_factor)
    self.oct_handler.n_ref = ds.n_ref
    # Log once (root processor only) rather than on every rank.
    only_on_root(
        mylog.info,
        "Allocating for %0.3e particles "
        "(index particle type '%s')",
        self.total_particles, index_ptype)
    # No more than 256^3 in the region finder.
    n_cells = min(len(self.data_files), 256)
    self.regions = ParticleRegions(
        ds.domain_left_edge, ds.domain_right_edge,
        [n_cells, n_cells, n_cells], len(self.data_files))
    self._initialize_indices()
    self.oct_handler.finalize()
    self.max_level = self.oct_handler.max_level
    self.dataset.max_level = self.max_level
    total_octs = sum(self.oct_handler.recursively_count().values())
    only_on_root(mylog.info, "Identified %0.3e octs", total_octs)
def test_particle_regions():
    """Exercise ParticleRegions: each data file's particles must be
    identified by exactly one region selector, and every mask must be
    uniform along its last two axes.
    """
    np.random.seed(int(0x4d3d3d3))
    # We are going to test having 2, 31, 127, 128 and 129 data files.
    for nfiles in [2, 31, 127, 128, 129]:
        # Now we create particles
        # Note: we set N to nfiles here for testing purposes.  Inside the code
        # we set it to min(N, 256)
        N = nfiles
        # BUG FIX: the domain left edge must have 3 components to match the
        # 3-component right edge and dimensions; the original passed 4 zeros.
        reg = ParticleRegions([0.0, 0.0, 0.0],
                              [nfiles, nfiles, nfiles],
                              [N, N, N], nfiles)
        Y, Z = np.mgrid[0.1:nfiles - 0.1:nfiles * 1j,
                        0.1:nfiles - 0.1:nfiles * 1j]
        X = 0.5 * np.ones(Y.shape, dtype="float64")
        pos = np.array([X.ravel(), Y.ravel(), Z.ravel()],
                       dtype="float64").transpose()
        # Register one sheet of particles per data file, marching along x.
        for i in range(nfiles):
            reg.add_data_file(pos, i)
            pos[:, 0] += 1.0
        pos[:, 0] = 0.5
        fr = FakeRegion(nfiles)
        # Each unit-wide region should match exactly the one file whose
        # particles lie inside it.
        for i in range(nfiles):
            fr.set_edges(i)
            selector = RegionSelector(fr)
            df = reg.identify_data_files(selector)
            yield assert_equal, len(df), 1
            yield assert_equal, df[0], i
            pos[:, 0] += 1.0
        # Masks must be constant over the y/z plane for this particle layout.
        for mask in reg.masks:
            maxs = np.unique(mask.max(axis=-1).max(axis=-1))
            mins = np.unique(mask.min(axis=-1).min(axis=-1))
            yield assert_equal, maxs, mins
            yield assert_equal, maxs, np.unique(mask)
def _initialize_particle_handler(self):
    """Construct per-file handles, count all particles, and build the
    octree container plus the coarse region finder for this index.
    """
    self._setup_data_io()
    # Instantiate one file handle per domain from the dataset's template.
    template = self.dataset.filename_template
    ndoms = self.dataset.file_count
    cls = self.dataset._file_class
    self.data_files = [
        cls(self.dataset, self.io, template % {'num': i}, i)
        for i in range(ndoms)
    ]
    # Total across every particle type in every file.
    self.total_particles = sum(
        sum(df.total_particles.values()) for df in self.data_files)
    ds = self.dataset
    self.oct_handler = ParticleOctreeContainer(
        [1, 1, 1], ds.domain_left_edge, ds.domain_right_edge,
        over_refine=ds.over_refine_factor)
    self.oct_handler.n_ref = ds.n_ref
    mylog.info("Allocating for %0.3e particles", self.total_particles)
    # No more than 256^3 in the region finder.
    n_cells = min(len(self.data_files), 256)
    self.regions = ParticleRegions(
        ds.domain_left_edge, ds.domain_right_edge,
        [n_cells, n_cells, n_cells], len(self.data_files))
    self._initialize_indices()
    self.oct_handler.finalize()
    self.max_level = self.oct_handler.max_level
    total_octs = sum(self.oct_handler.recursively_count().values())
    mylog.info("Identified %0.3e octs", total_octs)