def slice(self, beam, *args, **kwargs):
    '''Return a SliceSet object according to the saved configuration
    (factory method).

    The kwargs for the SliceSet are generated via
    ``self.compute_sliceset_kwargs(beam)``.

    NOTE: the method name shadows the built-in ``slice`` — kept for
    API compatibility with existing callers.

    Arguments:
        - statistics=True attaches mean values, standard deviations
          and emittances to the SliceSet for all planes.
        - statistics=['mean_x', 'sigma_dp', 'epsn_z'] only adds the
          listed statistics values (can be used to save time).
          Valid list entries are all statistics functions of Particles.
    '''
    sliceset_kwargs = self.compute_sliceset_kwargs(beam)
    # Heuristic device check: beam.z not being an np.ndarray means the
    # coordinates (probably) live on the GPU as GPUArrays — TODO confirm.
    if not isinstance(beam.z, np.ndarray):
        # GPU path: sort particles by their slice index so each slice
        # occupies a contiguous range of the particle arrays.
        s_idx_of_p = sliceset_kwargs['slice_index_of_particle']
        perm = pm.argsort(s_idx_of_p)
        beam.reorder(perm)
        # The reorder invalidated the previously computed kwargs
        # (in particular slice_index_of_particle), so recompute them.
        sliceset_kwargs = self.compute_sliceset_kwargs(beam)
        is_sorted = True
    else:
        # CPU path (np.ndarray): leave the particle order untouched.
        is_sorted = False
    # Attach the beam parameters and record whether the particles are
    # sorted by slice — the CPU implementation relies on this flag.
    sliceset_kwargs['beam_parameters'] = (
        self.extract_beam_parameters(beam))
    sliceset_kwargs['beam_parameters']['is_sorted'] = is_sorted
    sliceset = SliceSet(**sliceset_kwargs)
    # Optionally attach the requested statistics (True = all planes,
    # or an explicit list of statistics names).
    if 'statistics' in kwargs:
        self.add_statistics(sliceset, beam, kwargs['statistics'])
    return sliceset
def relocate_lost_particles(self, beam, alive):
    '''Overwriting the Aperture.relocate_lost_particles in order to
    update the SixTrackLib arrays with the fully reordered PyHEADTAIL
    macro-particle arrays before they get cut to the decreased length
    of still alive macro-particles.

    Arguments:
        - beam: Particles instance carrying the SixTrackLib attributes
        - alive: mask with 1 entries for alive macro-particles

    Returns the number of surviving macro-particles as np.int32.
    '''
    # Descending sort moves the alive particles (the 1 entries)
    # to the front of all arrays.
    beam.reorder(pm.argsort(-alive))
    # pm.sum returns an np.ndarray even when pm.device == 'GPU',
    # so the int32 cast happens on the CPU either way.
    n_alive = np.int32(pm.sum(alive))

    ### additional part for SixTrackLib:
    self.pyht_to_stl(beam)

    ### also need to limit view on SixTrackLib attributes
    ### in PyHT beam for their next reordering
    stl_attrs = ['state', 'at_element', 'at_turn', 's']
    if not beam.STL_longitudinal_update:
        stl_attrs += ['rpp', 'psigma', 'rvv']
    for name in stl_attrs:
        setattr(beam, name, getattr(beam, name)[:n_alive])

    return n_alive
def relocate_lost_particles(beam, alive):
    '''Relocate particles marked as lost to the end of the beam.u
    arrays (u = x, y, z, ...). Return the number of alive particles
    n_alive_post after considering the losses.

    Arguments:
        - beam: Particles instance
        - alive: boolean mask with length n_particles where 1 means alive
    '''
    # Sorting by -alive in ascending order puts the 1 entries
    # (alive particles) in front, the lost ones at the back.
    beam.reorder(pm.argsort(-alive))
    # pm.sum returns an np.ndarray even if pm.device == 'GPU',
    # so the cast below always runs on the CPU.
    return np.int32(pm.sum(alive))
def sort_for(self, attr):
    '''Sort the named particle attribute (coordinate / momentum)
    array and reorder all particles accordingly.
    '''
    values = getattr(self, attr)
    self.reorder(pm.argsort(values))
def align_particles(beam, mesh_3d):
    '''Sort all particles by their mesh node IDs.'''
    node_ids = mesh_3d.get_node_ids(beam.x, beam.y, beam.z_beamframe)
    beam.reorder(pm.argsort(node_ids))