def _get_trajectories(input_files, args):
    from atooms.trajectory import Trajectory, Sliced
    from atooms.core.utils import fractional_slice
    for input_file in input_files:
        with Trajectory(input_file, fmt=args['fmt']) as th:
            if args['center']:
                th.add_callback(center)
            # Caching is useful for systems with multiple species but
            # it will increase the memory footprint. Use --no-cache to
            # disable it
            if not args['no_cache']:
                th.cache = True
            if args['species_layout'] is not None:
                th.register_callback(change_species, args['species_layout'])
            sl = fractional_slice(args['first'], args['last'], args['skip'], len(th))
            # Round slice boundaries down to multiples of the block size,
            # so that blocks of log-spaced frames are never truncated
            if th.block_size > 1:
                sl_start = (sl.start // th.block_size) * th.block_size if sl.start is not None else sl.start
                sl_stop = (sl.stop // th.block_size) * th.block_size if sl.stop is not None else sl.stop
                sl = slice(sl_start, sl_stop, sl.step)
            if sl != slice(None, None, 1):
                ts = Sliced(th, sl)
            else:
                ts = th
            yield ts
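
# A minimal usage sketch for _get_trajectories. The file name and args values
# are hypothetical; 'center' and 'change_species' are assumed to be callbacks
# defined elsewhere in this module. Each trajectory is yielded from inside its
# 'with' block, so consume it before advancing the generator.
_example_args = {'fmt': 'xyz', 'center': False, 'no_cache': False,
                 'species_layout': None, 'first': 0, 'last': None, 'skip': 1}
for _ts in _get_trajectories(['input.xyz'], _example_args):
    print(len(_ts), 'frames')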
def dump_config(filename, pos=None, diam=None, ptypes=None, box=None, compress=None):
    file_ext = os.path.splitext(filename)[1]
    cell = Cell(side=box)
    npart = pos.shape[0]
    particles = []
    for i in range(npart):
        p = Particle(species=ptypes[i],
                     position=pos[i, :] - box / 2.,
                     radius=diam[i] / 2.)
        particles.append(p)
    system = System(particle=particles, cell=cell)
    with Trajectory(filename, "w") as traj:
        # Step is always 0 for now.
        traj.write(system, 0)
    # Unless the caller decided explicitly, compress only xyz files
    if compress is None:
        compress = (file_ext == ".xyz")
    if compress:
        subprocess.run(["gzip", "-f", filename])
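
# Usage sketch for dump_config, assuming a cubic box and positions in [0, L)
# (file name and values below are hypothetical). The resulting xyz file is
# gzipped in place by default.
import numpy as np
_L = 10.0
_box = np.array([_L, _L, _L])
_pos = np.random.uniform(0.0, _L, size=(100, 3))
_diam = np.ones(100)
_ptypes = ['A'] * 100
dump_config("config.xyz", pos=_pos, diam=_diam, ptypes=_ptypes, box=_box)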
def __init__(self, trajectory, trajectory_radius, kgrid=None, norigins=-1,
             nk=20, dk=0.1, kmin=-1.0, kmax=15.0, ksamples=30):
    FourierSpaceCorrelation.__init__(self, trajectory, kgrid, 'k', 'ik',
                                     'spectral density I(k)', ['pos'],
                                     nk, dk, kmin, kmax, ksamples)
    # TODO: move this up the chain?
    self.skip = adjust_skip(self.trajectory, norigins)
    self._is_cell_variable = None
    # TODO: check step consistency 06.09.2017
    from atooms.trajectory import Trajectory
    with Trajectory(trajectory_radius) as th:
        self._radius = [s.dump('particle.radius') for s in th]
def test_class_callback(self):
    def f(s):
        s._signal = True
        return s

    class TrajectoryXYZCustom(TrajectoryXYZ):
        pass

    TrajectoryXYZCustom.add_class_callback(f)
    Trajectory.add(TrajectoryXYZCustom)
    self.assertFalse(TrajectoryXYZCustom.class_callbacks is TrajectoryXYZ.class_callbacks)
    with Trajectory(os.path.join(self.inpdir, 'test.xyz'), 'w') as th:
        th.write(self.system[0])
    with Trajectory(os.path.join(self.inpdir, 'test.xyz'), 'r') as th:
        s = th[0]
        self.assertTrue(hasattr(s, '_signal'))
    TrajectoryXYZCustom.class_callbacks.remove((f, (), {}))
    with Trajectory(os.path.join(self.inpdir, 'test.xyz'), 'r') as th:
        s = th[0]
        self.assertFalse(hasattr(s, '_signal'))
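
# The mechanism exercised by the test above, outside the test harness: a class
# callback is applied to every frame read through the class that registered
# it. A minimal sketch; the file path and class name are hypothetical.
from atooms.trajectory import TrajectoryXYZ
from atooms.system import System, Particle, Cell

def tag_frame(system):
    system._tagged = True
    return system

class TaggedTrajectoryXYZ(TrajectoryXYZ):
    pass

TaggedTrajectoryXYZ.add_class_callback(tag_frame)
_system = System(particle=[Particle(position=[0.0, 0.0, 0.0])],
                 cell=Cell(side=[1.0, 1.0, 1.0]))
with TaggedTrajectoryXYZ('/tmp/tagged.xyz', 'w') as th:
    th.write(_system, 0)
with TaggedTrajectoryXYZ('/tmp/tagged.xyz') as th:
    assert hasattr(th[0], '_tagged')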
def decimate(traj_file, traj_file_out, num_per_block, skip):
    traj_file_out = os.path.join(os.path.dirname(traj_file), traj_file_out)
    with Trajectory(traj_file) as traj:
        result = detect_and_fix_spacing(traj.steps)
        if result['mode'] == "log":
            if num_per_block is None:
                raise Exception(
                    "Please supply num_per_block for a log-spaced trajectory.")
            decimated_steps, decimated_frames = decimate_log_trajectory(
                traj.steps, num_per_block=num_per_block)
        else:
            if skip is None:
                raise Exception(
                    'Please supply skip for a linear (or "other") trajectory.')
            corrected_steps = result["corrected_steps"]
            decimated_steps, decimated_frames = decimate_trajectory(
                corrected_steps, skip=skip)
        with Trajectory(traj_file_out, "w") as new_traj:
            for i, frame in enumerate(decimated_frames):
                system = traj[int(frame)]
                new_traj.write(system, decimated_steps[i])
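
# Usage sketch (hypothetical file names): keep 4 frames per block of a
# log-spaced trajectory, or every 10th frame of a linearly spaced one. The
# output file is written next to the input file.
decimate("run_log.xyz", "run_log_decimated.xyz", num_per_block=4, skip=None)
decimate("run_linear.xyz", "run_linear_decimated.xyz", num_per_block=None, skip=10)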
def inspect(traj_file):
    state = get_state_information(traj_file)
    dt = state['dt']
    with Trajectory(traj_file) as traj:
        nframes = len(traj.steps)
        species = traj[0].distinct_species()
        nspecies = len(species)
        npart = len(traj[0].particle)
        click.echo("""System information:
particles: %d
distinct species: %d""" % (npart, nspecies))
        click.echo(" species: " + ', '.join(species))
        result = detect_and_fix_spacing(traj.steps)
        mode = result["mode"]
        if mode == "log":
            base = result["base"]
            max_exp = result["max_exp"]
            block_size = int(base**max_exp)
            tmin = traj.steps[0]
            tmax = traj.steps[-1]
            nblocks = (tmax - tmin) // block_size
            click.echo("""Trajectory information:
frames: %d
mode: %s
blocks: %d
block size: %g = %g * %d = %g * %d ** %d""" % (nframes, mode, nblocks,
                                               dt * block_size, dt, block_size,
                                               dt, base, max_exp))
        elif mode == "linear":
            click.echo("""Trajectory information:
frames: %d
mode: %s
spacing: %g = %g * %d""" % (nframes, mode, dt * result['spacing'],
                            dt, result["spacing"]))
        elif mode == "other":
            click.echo("""Trajectory information:
frames: %d
mode: %s
steps: %s""" % (nframes, mode,
                ", ".join([str(x) for x in result["corrected_steps"].tolist()])))
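
# Usage sketch (hypothetical file name, assuming the helpers used above are
# importable): prints the system composition and the detected step spacing.
inspect("run.xyz")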
def __init__(self, trajectory, kgrid=None, tgrid=None, nk=8, tsamples=60,
             dk=0.1, kmin=1.0, kmax=10.0, ksamples=10, norigins=-1,
             fix_cm=False, lookup_mb=64.0, normalize=True):
    # TODO: remove this backward compatible tgrid fix in a major release
    # The default time grid should be the same for F_s(k,t) and F(k,t)
    if isinstance(trajectory, str):
        trajectory = Trajectory(trajectory, mode='r',
                                fmt=core.pp_trajectory_format)
    if tgrid is None:
        tgrid = [0.0] + logx_grid(trajectory.timestep,
                                  trajectory.total_time * 0.75, tsamples)
    super(SelfIntermediateScatteringLegacy, self).__init__(
        trajectory, kgrid=kgrid, tgrid=tgrid, nk=nk, tsamples=tsamples,
        dk=dk, kmin=kmin, kmax=kmax, ksamples=ksamples, norigins=norigins,
        fix_cm=fix_cm, normalize=normalize)
    self.lookup_mb = lookup_mb
    """Memory in Mb allocated for exponentials tabulation"""
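
# Usage sketch for the class this __init__ belongs to
# (SelfIntermediateScatteringLegacy, per the super() call). The trajectory
# path and wave number are hypothetical; do() runs the compute/analyze/write
# template of the Correlation base class.
corr = SelfIntermediateScatteringLegacy('run.xyz', kgrid=[7.2])
corr.do()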
def test_trajectory(self):
    # Make sure the original trajectories have no wrappers
    with Trajectory('data/lj_N256_rho1.0.xyz') as trajectory:
        self.assertEqual(trajectory.class_callbacks, None)
class Correlation(object):
    """
    Base class for correlation functions.

    The correlation function is calculated for the trajectory `trj`.
    This can be:

    - an object implementing the atooms `Trajectory` interface
    - the path to a trajectory file in a format recognized by atooms

    A correlation function A(x) is defined over a grid of real
    entries {x_i} given by the list `grid`. To each entry of the grid,
    the correlation function has a corresponding value A_i=A(x_i). The
    latter values are stored in the `value` list.

    Correlation functions that depend on several variables, A(x,y,...),
    must provide a list of grids, one for each variable. The order is
    the one specified by the `symbol` class variable, see below.

    The correlation function A is calculated as a statistical average
    over several time origins in the trajectory `trj`. The `norigins`
    variable can be used to tune the number of time origins used to
    carry out the average. `norigins` can take the following values:

    - `norigins=-1`: all origins are used
    - an integer >= 1: use only `norigins` time origins
    - a float in the interval (0,1): use only a fraction `norigins` of frames as time origins
    - `None`: a heuristic is used to keep the product of steps times particles constant

    Subclasses must provide a symbolic expression of the correlation
    function through the `symbol` class variable. The following
    convention is used: if a correlation function A depends on
    variables x and y, then `symbol = 'A(x,y)'`.

    The `phasespace` variable allows subclasses to access a list of 2d
    numpy arrays with particles coordinates via the following private
    variables:

    - if `nbodies` is 1: `self._pos` for positions, `self._vel` for
      velocities, `self._unf_pos` for PBC-unfolded positions
    - if `nbodies` is 2, an additional suffix that takes values 0 or 1
      is added to distinguish the two sets of particles, e.g.
      `self._pos_0` and `self._pos_1`
    """

    nbodies = 1
    """
    Class variable that controls the number of bodies, i.e. particles,
    associated to the correlation function. This variable controls the
    internal arrays used to compute the correlation, see `phasespace`.
    """

    static = False
    """
    Turn this to `True` if the correlation function is static, i.e. not
    time-dependent. This may enable some optimizations.
    """

    symbol = ''
    """Example: fskt"""

    short_name = ''
    """Example: F_s(k,t)"""

    long_name = ''
    """Example: Self intermediate scattering function"""

    phasespace = ['pos', 'pos-unf', 'vel']
    """
    List of strings, or a single string, among 'pos', 'pos-unf' and
    'vel'. It indicates which variables should be read from the
    trajectory file. They will be available as self._pos,
    self._pos_unf, self._vel.
""" def __init__(self, trj, grid, output_path=None, norigins=None, fix_cm=False): # Accept a trajectory-like instance or a path to a trajectory if isinstance(trj, str): self.trajectory = Trajectory(trj, mode='r', fmt=core.pp_trajectory_format) else: self.trajectory = trj self._fix_cm = fix_cm self._unfolded = None self.grid = grid self.value = [] self.analysis = {} self.comments = None # can be modified by user at run time self.tag = '' self.tag_description = 'the whole system' self.output_path = output_path if output_path is not None else core.pp_output_path self.skip = adjust_skip(self.trajectory, norigins) # Callbacks self._cbk = [] self._cbk_args = [] self._cbk_kwargs = [] # Lists for one body correlations self._pos = [] self._vel = [] self._ids = [] self._pos_unf = [] # Lists for two-body correlations self._pos_0, self._pos_1 = [], [] self._vel_0, self._vel_1 = [], [] self._ids_0, self._ids_1 = [], [] self._pos_unf_0, self._pos_unf_1 = [], [] # Weights self._weight = None self._weight_0, self._weight_1 = None, None self._weight_field = None self._weight_fluctuations = False def __str__(self): return '{} at <{}>'.format(self.long_name, id(self)) def add_weight(self, trajectory=None, field=None, fluctuations=False): """ Add weight from the given `field` in `trajectory` If `trajectory` is `None`, `self.trajectory` will be used and `field` must be a particle property. If `field` is `None`, `trajectory` is assumed to be a path an xyz trajectory and we use the data in last column as a weight. If both `field` and `trajectory` are `None` the function returns immediately and the weight is not set. The optional `fluctuations` option subtracts the mean, calculated from the ensemble average, from the weight. """ if trajectory is None and field is None: return self._weight = [] self._weight_fluctuations = fluctuations # Guessing the field from the last column of an xyz file is # not supported anymore if field is None: raise ValueError('provide field to use as weight') else: self._weight_field = field # By default we use the same trajectory as for the phasespace if trajectory is None: self._weight_trajectory = self.trajectory else: self._weight_trajectory = trajectory # Copy over the field from .helpers import copy_field self.trajectory.add_callback(copy_field, self._weight_field, self._weight_trajectory) # Make sure the steps are consistent if self._weight_trajectory.steps != self.trajectory.steps: raise ValueError( 'inconsistency between weight trajectory and trajectory') # Modify tag fluct = 'fluctuations' if self._weight_fluctuations else '' self.tag_description += ' with {} {} field'.format( self._weight_field.replace('_', ' '), fluct) self.tag_description = self.tag_description.replace(' ', ' ') self.tag += '.{}_{}'.format(self._weight_field, fluct) self.tag.strip('_.') def add_filter(self, cbk, *args, **kwargs): """Add filter callback `cbk` along with positional and keyword arguments""" if len(self._cbk) > self.nbodies: raise ValueError('number of filters cannot exceed n. of bodies') self._cbk.append(cbk) self._cbk_args.append(args) self._cbk_kwargs.append(kwargs) def need_update(self): """Check if the trajectory file is newer than the output file""" need = True if os.path.exists( self._output_file) and self._output_file != '/dev/stdout': if os.path.getmtime(self.trajectory.filename) < \ os.path.getmtime(self._output_file): need = False return need def _setup_arrays(self): """ Dump positions and/or velocities at different time frames as a list of numpy array. 
""" # TODO: what happens if we call compute twice?? Shouldnt we reset the arrays? # Ensure phasespace is a list. # It may not be a class variable anymore after this if not isinstance(self.phasespace, list) and \ not isinstance(self.phasespace, tuple): self.phasespace = [self.phasespace] # Setup arrays if self.nbodies == 1: self._setup_arrays_onebody() self._setup_weight_onebody() elif self.nbodies == 2: self._setup_arrays_twobody() self._setup_weight_twobody() def _setup_arrays_onebody(self): """ Setup list of numpy arrays for one-body correlations. We also take care of dumping the weight if needed, see `add_weight()`. """ if 'pos' in self.phasespace or 'vel' in self.phasespace or 'ids' in self.phasespace: ids = distinct_species(self.trajectory[0].particle) for s in progress(self.trajectory): # Apply filter if there is one if len(self._cbk) > 0: s = self._cbk[0](s, *self._cbk_args[0], **self._cbk_kwargs[0]) if 'pos' in self.phasespace: self._pos.append(s.dump('pos')) if 'vel' in self.phasespace: self._vel.append(s.dump('vel')) if 'ids' in self.phasespace: _ids = s.dump('species') _ids = numpy.array([ids.index(_) for _ in _ids], dtype=numpy.int32) self._ids.append(_ids) # Dump unfolded positions if requested if 'pos-unf' in self.phasespace: if self._unfolded is None: self._unfolded = Unfolded(self.trajectory, fixed_cm=self._fix_cm) for s in progress(self._unfolded): # Apply filter if there is one if len(self._cbk) > 0: s = self._cbk[0](s, *self._cbk_args[0], **self._cbk_kwargs[0]) self._pos_unf.append(s.dump('pos')) def _setup_weight_onebody(self): """ Setup list of numpy arrays for the weight, see `add_weight()` """ if self._weight is None: return # Dump arrays of weights for s in progress(self.trajectory): # Apply filter if there is one # TODO: fix when weight trajectory does not contain actual particle info # It should be possible to link the weight trajectory to the trajectory # and return the trajectory particles with the weight if len(self._cbk) > 0: s = self._cbk[0](s, *self._cbk_args[0], **self._cbk_kwargs[0]) current_weight = s.dump('particle.%s' % self._weight_field) self._weight.append(current_weight) # Subtract global mean if self._weight_fluctuations: _subtract_mean(self._weight) def _setup_weight_twobody(self): """ Setup list of numpy arrays for the weight, see `add_weight()` """ if self._weight is None: return self._weight = [] self._weight_0 = [] self._weight_1 = [] # TODO: add checks on number of filters if len(self._cbk) <= 1: self._setup_weight_onebody() self._weight_0 = self._weight self._weight_1 = self._weight return # Dump arrays of weights for s in progress(self.trajectory): # Apply filters if len(self._cbk) == 2: s0 = self._cbk[0](s, *self._cbk_args[0], **self._cbk_kwargs[0]) s1 = self._cbk[1](s, *self._cbk_args[1], **self._cbk_kwargs[1]) self._weight_0.append(s0.dump('particle.%s' % self._weight_field)) self._weight_1.append(s1.dump('particle.%s' % self._weight_field)) # Subtract global mean if self._weight_fluctuations: _subtract_mean(self._weight_0) _subtract_mean(self._weight_1) def _setup_arrays_twobody(self): """Setup list of numpy arrays for two-body correlations.""" if len(self._cbk) <= 1: self._setup_arrays_onebody() self._pos_0 = self._pos self._pos_1 = self._pos self._vel_0 = self._vel self._vel_1 = self._vel self._ids_0 = self._ids self._ids_1 = self._ids return if 'pos' in self.phasespace or 'vel' in self.phasespace or 'ids' in self.phasespace: ids = distinct_species(self.trajectory[0].particle) for s in progress(self.trajectory): s0 = self._cbk[0](s, 
                s1 = self._cbk[1](s, *self._cbk_args[1], **self._cbk_kwargs[1])
                if 'pos' in self.phasespace:
                    self._pos_0.append(s0.dump('pos'))
                    self._pos_1.append(s1.dump('pos'))
                if 'vel' in self.phasespace:
                    self._vel_0.append(s0.dump('vel'))
                    self._vel_1.append(s1.dump('vel'))
                if 'ids' in self.phasespace:
                    _ids_0 = s0.dump('species')
                    _ids_1 = s1.dump('species')
                    _ids_0 = numpy.array([ids.index(_) for _ in _ids_0], dtype=numpy.int32)
                    _ids_1 = numpy.array([ids.index(_) for _ in _ids_1], dtype=numpy.int32)
                    self._ids_0.append(_ids_0)
                    self._ids_1.append(_ids_1)

        # Dump unfolded positions if requested
        if 'pos-unf' in self.phasespace:
            for s in progress(Unfolded(self.trajectory)):
                s0 = self._cbk[0](s, *self._cbk_args[0], **self._cbk_kwargs[0])
                s1 = self._cbk[1](s, *self._cbk_args[1], **self._cbk_kwargs[1])
                self._pos_unf_0.append(s0.dump('pos'))
                self._pos_unf_1.append(s1.dump('pos'))

    def compute(self):
        """
        Compute the correlation function.

        It wraps the _compute() method implemented by subclasses.
        This method sets the `self.grid` and `self.value` variables,
        which are also returned.
        """
        _log.info('setup arrays for %s', self.tag_description)
        t = [Timer(), Timer()]
        t[0].start()
        self._setup_arrays()
        t[0].stop()

        _log.info('computing %s for %s', self.long_name, self.tag_description)
        _log.info('using %s time origins out of %s',
                  len(range(0, len(self.trajectory), self.skip)),
                  len(self.trajectory))
        t[1].start()
        self._compute()
        t[1].stop()

        _log.info('output file %s', self._output_file)
        _log.info('done %s for %s in %.1f sec [setup:%.0f%%, compute: %.0f%%]',
                  self.long_name, self.tag_description,
                  t[0].wall_time + t[1].wall_time,
                  t[0].wall_time / (t[0].wall_time + t[1].wall_time) * 100,
                  t[1].wall_time / (t[0].wall_time + t[1].wall_time) * 100)
        _log.info('')
        return self.grid, self.value

    def _compute(self):
        """Subclasses must implement this"""
        pass

    def analyze(self):
        """
        Subclasses may implement this and store the results in the
        self.analysis dictionary
        """
        pass

    @property
    def _output_file(self):
        """Returns path of output file"""
        # Interpolate the output path string
        if self.output_path is None:
            filename = None
        else:
            filename = self.output_path.format(
                symbol=self.symbol,
                short_name=self.short_name,
                long_name=self.long_name.replace(' ', '_'),
                tag=self.tag,
                tag_description=self.tag_description.replace(' ', '_'),
                trajectory=self.trajectory)
            # Strip unpleasant punctuation from basename path
            for punct in ['.', '_', '-']:
                subpaths = filename.split('/')
                subpaths[-1] = subpaths[-1].replace(punct * 2, punct)
                subpaths[-1] = subpaths[-1].strip(punct)
                filename = '/'.join(subpaths)
        return filename

    def read(self):
        """Read correlation function from existing file"""
        with open(self._output_file, 'r') as inp:
            x = numpy.loadtxt(inp, unpack=True)
            if len(x) == 3:
                _log.warn("cannot read 3-columns files yet in %s", self._output_file)
            elif len(x) == 2:
                self.grid, self.value = x
            else:
                self.grid, self.value = x[0:2]
                _log.warn("Ignoring some columns in %s", self._output_file)

    @property
    def grid_name(self):
        """
        Return the name of the grid variables

        Example:
        -------
        If `self.short_name` is `F_s(k,t)`, the function returns `['k', 't']`
        """
        variables = self.short_name.split('(')[1][:-1]
        return variables.split(',')

    def write(self):
        """
        Write the correlation function and the analysis data to file

        The `output_path` instance variable is used to define the
        output files by interpolating the following variables:

        - symbol
        - short_name
        - long_name
        - tag
        - tag_description
        - trajectory

        The default is defined by core.pp_output_path, which
        currently looks like '{trajectory.filename}.pp.{symbol}.{tag}'
        """
        def is_iterable(maybe_iterable):
            try:
                iter(maybe_iterable)
            except TypeError:
                return False
            else:
                return True

        # Pack grid and value into arrays to dump
        if is_iterable(self.grid[0]) and len(self.grid) == 2:
            x = numpy.array(self.grid[0]).repeat(len(self.value[0]))
            y = numpy.array(self.grid[1] * len(self.grid[0]))
            z = numpy.array(self.value).flatten()
            dump = numpy.transpose(numpy.array([x, y, z]))
        else:
            dump = numpy.transpose(numpy.array([self.grid, self.value]))

        # Comment line
        # Extract variables from parenthesis in symbol
        variables = self.short_name.split('(')[1][:-1]
        variables = variables.split(',')
        columns = variables + [self.short_name]  # [self.symbol]
        if len(self.tag_description) > 0:
            conj = 'of'
        else:
            conj = ''
        comments = _dump(title='%s %s %s %s' % (self.long_name, self.short_name,
                                                conj, self.tag_description),
                         columns=columns,
                         command='atooms-pp',
                         version=core.__version__,
                         description=None,
                         note=None,
                         parents=self.trajectory.filename,
                         inline=False)
        if self.comments is not None:
            comments += self.comments

        # Results of analysis
        analysis = ""
        for x, f in self.analysis.items():
            if f is not None:
                analysis += '# %s: %s\n' % (x, f)

        # Put it all together
        # and make sure the path to the output file exists
        import os
        from atooms.core.utils import mkdir
        mkdir(os.path.dirname(self._output_file))
        with open(self._output_file, 'w') as fh:
            fh.write(comments)
            if len(analysis) > 0:
                fh.write(analysis)
            numpy.savetxt(fh, dump, fmt="%g")
            fh.flush()

    def do(self, update=False):
        """
        Do the full template pattern: compute, analyze and write the
        correlation function.
        """
        if update and not self.need_update():
            self.read()
            return
        self.compute()
        try:
            self.analyze()
        except ImportError as e:
            _log.warn('Could not analyze due to missing modules, continuing...')
            _log.warn(str(e))
        self.write()

    def __call__(self):
        self.do()
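
# A minimal sketch of the subclassing contract described in the class
# docstring: metadata (symbol, short_name, long_name, phasespace) plus a
# _compute() implementation. The class name is illustrative; for brevity it
# uses a single time origin (t=0), whereas real atooms-pp backends average
# over many origins using self.skip.
class MeanSquareDisplacementSketch(Correlation):

    nbodies = 1
    symbol = 'msd'
    short_name = 'dr^2(t)'
    long_name = 'mean square displacement'
    phasespace = ['pos-unf']

    def _compute(self):
        # self._pos_unf[i] is a (npart, ndim) array of unfolded positions
        origin = self._pos_unf[0]
        self.grid = [step - self.trajectory.steps[0]
                     for step in self.trajectory.steps]
        self.value = [float(numpy.mean(numpy.sum((p - origin)**2, axis=1)))
                      for p in self._pos_unf]

# Usage sketch (hypothetical file name); the base class reads the phasespace,
# calls _compute() and interpolates the output path:
# msd = MeanSquareDisplacementSketch('run.xyz', grid=None)
# grid, value = msd.compute()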
def self_intermediate_scattering_function_particle(traj_file, k_value,
                                                   start_pos, final_pos,
                                                   check_reference_mean=False,
                                                   **kwargs):
    """
    Compute the per-particle self-intermediate scattering function.

    traj_file: used to get the box size, time steps, etc. (not strictly
        necessary, but convenient for now)
    final_pos: (num_frames, N, dim) array
    """
    kgrid = [k_value]
    npart = start_pos.shape[0]
    ndim = start_pos.shape[1]
    num_frames = final_pos.shape[0]
    displacement = final_pos - start_pos

    with Trajectory(traj_file, "r") as traj:
        tgrid = traj.steps
        box_size = traj[0].cell.side
        corr = FourierSpaceCorrelation(trajectory=traj, grid=[kgrid, tgrid],
                                       **kwargs)
        if check_reference_mean:
            analysis_kwargs = copy.copy(kwargs)
            analysis_kwargs['fix_cm'] = False
            analysis_kwargs['normalize'] = False
            analysis = pp.SelfIntermediateScattering(trajectory=traj,
                                                     kgrid=kgrid, tgrid=tgrid,
                                                     **analysis_kwargs)
            analysis.do()
            result_coslovich = np.array(analysis.value[0])

        corr.kgrid = kgrid
        # Setup grid once. If cell changes we'll call it again
        corr._setup()
        # Pick up a random, unique set of nk vectors out of the available ones
        # without exceeding maximum number of vectors in shell nkmax
        corr.kgrid, corr.selection = corr._decimate_k()
        # We redefine the grid because slight differences in the average
        # k norms appear after decimation.
        corr.kgrid = corr._actual_k_grid()
        # We must fix the keys: just pop them to their new positions.
        # We sort both of them (better check the lengths)
        for k, kv in zip(sorted(corr.kgrid), sorted(corr.kvector)):
            corr.kvector[k] = corr.kvector.pop(kv)
        kvectors = corr.kvectors[corr.kgrid[0]]
        nk_actual = len(kvectors)

        # Construct a freud box object to correctly apply the minimum
        # image convention.
        if ndim == 2:
            box = freud.box.Box(Lx=box_size[0], Ly=box_size[1])
            # Add a z-coordinate of 0 (only necessary for applying the
            # minimum image convention with freud).
            displacement_new = np.zeros((num_frames, npart, 3))
            displacement_new[:, :, :2] = displacement
            displacement = displacement_new
        elif ndim == 3:
            box = freud.box.Box(Lx=box_size[0], Ly=box_size[1], Lz=box_size[2])

        # Apply minimum image convention between final_pos and start_pos
        for iso_run in range(num_frames):
            displacement[iso_run, :, :] = box.wrap(displacement[iso_run, :, :])

        # Throw away the dummy z-coordinate if in 2D.
        displacement = displacement[:, :, :ndim]

        result = np.zeros((nk_actual, num_frames, npart))
        for k_idx, kvec in enumerate(kvectors):
            result[k_idx, :, :] = np.cos(np.dot(displacement, kvec))
        # Average over different k vectors.
        result = np.mean(result, axis=0)

        if check_reference_mean:
            return result, result_coslovich
        else:
            return result
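
# Usage sketch for the per-particle F_s(k,t) above (file name and wave number
# are hypothetical). Passing check_reference_mean=True also returns the
# atooms-pp reference curve for cross-checking the particle-averaged result.
with Trajectory("run.xyz", "r") as _traj:
    _pos = np.array([s.dump('pos') for s in _traj])
fskt = self_intermediate_scattering_function_particle(
    "run.xyz", k_value=7.2, start_pos=_pos[0], final_pos=_pos, nk=8)
print(fskt.shape)   # (num_frames, npart)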
def read_sample(self, frame):
    from atooms.trajectory import Trajectory
    with Trajectory(self.files[frame], fmt=self._cls) as th:
        # We must use read(), instead of read_sample(), to
        # initialize the trajectory properly
        return th.read(0)
def read_timestep(self):
    from atooms.trajectory import Trajectory
    with Trajectory(self.files[0], fmt=self._cls) as th:
        return th.read_timestep()
box_size = snap['box']
num_frames = min([max_frames, num_frames])
print("reading trajectory of %d frames..." % num_frames)
snap, _ = read_trajectory(traj_file, frame=slice(0, num_frames),
                          unfold=False, fixed_cm=False)
pos = snap['pos']
steps = snap['steps']
result = detect_and_fix_spacing(steps)
steps = result["corrected_steps"]
print("done.")

print("calculating relative positions...")
voro = voronoi_analysis(pos_start, box_size)
relative_pos = cage_relative_traj(pos, box_size, voro)
print("done.")

with Trajectory(traj_file, "r") as traj:
    with Trajectory(rel_traj_file, "w") as rel_traj:
        for frame in range(num_frames):
            system = traj[frame]
            for i in range(npart):
                system.particle[i].position = relative_pos[frame, i, :]
            print("writing %d" % frame)
            rel_traj.write(system, steps[frame])