def timeloop():
    global a, a_dump, drift_fac, i_dump, kick_fac, t, Δt
    # Do nothing if no dump times exist
    if len(a_dumps) == 0:
        return
    # Get the output filename patterns
    output_filenames = prepare_output_times()
    # Load initial conditions
    particles = load_particles(IC_file)
    # The number of time steps before Δt is updated
    Δt_update_freq = 10
    # Initial cosmic time t, where a(t) = a_begin
    a = a_begin
    t = cosmic_time(a)
    # The time step size should be a
    # small fraction of the age of the universe.
    Δt = Δt_factor*t
    # Arrays containing the drift and kick factors ∫_t^(t + Δt/2)dt/a
    # and ∫_t^(t + Δt/2)dt/a**2. The two elements in each variable are
    # the first and second half of the factor for the entire time step.
    drift_fac = zeros(2, dtype=C2np['double'])
    kick_fac  = zeros(2, dtype=C2np['double'])
    # Scalefactor at next dump and a corresponding index
    i_dump = 0
    a_dump = a_dumps[i_dump]
    # Possible output at a == a_begin
    dump(particles, output_filenames)
    # The main time loop
    masterprint('Begin main time loop')
    timestep = -1
    while i_dump < len(a_dumps):
        timestep += 1
        # Print out message at beginning of each time step
        masterprint(terminal.bold('\nTime step {}'.format(timestep))
                    + '{:<14} {}'.format('\nScale factor:',
                                         significant_figures(a, 4, fmt='Unicode'))
                    + '{:<14} {} Gyr'.format('\nCosmic time:',
                                             significant_figures(t/units.Gyr, 4,
                                                                 fmt='Unicode'))
                    )
        # Kick (first time is only half a kick, as kick_fac[1] == 0)
        do_kick_drift_integrals(0)
        particles.kick(kick_fac[0] + kick_fac[1])
        if dump(particles, output_filenames, 'drift'):
            continue
        # Update Δt every Δt_update_freq time step
        if not (timestep % Δt_update_freq):
            # Let the positions catch up to the momenta
            particles.drift(drift_fac[0])
            Δt = Δt_factor*t
            # Reset the second kick factor,
            # making the next operation a half kick.
            kick_fac[1] = 0
            continue
        # Drift
        do_kick_drift_integrals(1)
        particles.drift(drift_fac[0] + drift_fac[1])
        if dump(particles, output_filenames, 'kick'):
            continue
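# ---------------------------------------------------------------------
# Illustration (not part of the CO𝘕CEPT source): the loop above is a
# kick-drift-kick (KDK) leapfrog in comoving coordinates, where a kick
# weights the force by ∫dt/a and a drift weights the momentum by
# ∫dt/a². The minimal sketch below shows a single textbook KDK step;
# positions, momenta, mass and force are hypothetical stand-ins for the
# particle data updated by particles.kick() and particles.drift().
import numpy as np

def kdk_step(positions, momenta, mass, force, kick_fac_half, drift_fac):
    """Advance one kick-drift-kick step,
    given precomputed scale factor integrals."""
    momenta   = momenta + force(positions)*kick_fac_half  # half kick, weighted by ∫dt/a
    positions = positions + momenta/mass*drift_fac        # drift, weighted by ∫dt/a²
    momenta   = momenta + force(positions)*kick_fac_half  # half kick, weighted by ∫dt/a
    return positions, momenta

# Example usage: two force-free particles in one dimension
x = np.array([0.0, 1.0])
p = np.array([+1.0, -1.0])
x, p = kdk_step(x, p, mass=1.0, force=lambda pos: np.zeros_like(pos),
                kick_fac_half=0.01, drift_fac=0.02)
# ---------------------------------------------------------------------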
def do_kick_drift_integrals(index):
    global a, a_dump, drift_fac, kick_fac, t, Δt
    # Update the scale factor and the cosmic time. This also
    # tabulates a(t), needed for the kick and drift integrals.
    a_next = expand(a, t, 0.5*Δt)
    t_next = t + 0.5*Δt
    if a_next >= a_dump:
        # Dump time reached. A smaller time step than
        # 0.5*Δt is needed to hit a_dump exactly.
        a_next = a_dump
        t_next = cosmic_time(a_dump, a, t, t_next)
        expand(a, t, t_next - t)
    a = a_next
    t = t_next
    # Do the kick and drift integrals
    # ∫_t^(t + Δt/2)dt/a and ∫_t^(t + Δt/2)dt/a**2.
    kick_fac[index]  = scalefactor_integral(-1)
    drift_fac[index] = scalefactor_integral(-2)
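# ---------------------------------------------------------------------
# Illustration (not part of the CO𝘕CEPT source): the two integrals
# returned by scalefactor_integral(-1) and scalefactor_integral(-2)
# are ∫dt/a and ∫dt/a² over the half step. For a matter-dominated
# (Einstein-de Sitter) background, a(t) = (t/t0)**(2/3), both have
# closed forms, so a plain quadrature (the hypothetical helper below,
# which does not exist in CO𝘕CEPT) can be checked against them.
from scipy.integrate import quad

def kick_and_drift_factors(t1, t2, a_of_t):
    """Return (∫dt/a, ∫dt/a²) over [t1, t2] by numerical quadrature."""
    kick_fac_num,  _ = quad(lambda t: 1/a_of_t(t),    t1, t2)
    drift_fac_num, _ = quad(lambda t: 1/a_of_t(t)**2, t1, t2)
    return kick_fac_num, drift_fac_num

t0 = 1.0                          # age of the EdS universe (arbitrary units)
a_of_t = lambda t: (t/t0)**(2/3)  # Einstein-de Sitter scale factor
t1, t2 = 0.5*t0, 0.6*t0
kick_fac_num, drift_fac_num = kick_and_drift_factors(t1, t2, a_of_t)
# Closed forms: ∫dt/a  = 3*t0**(2/3)*(t2**(1/3) - t1**(1/3))
#               ∫dt/a² = 3*t0**(4/3)*(t1**(-1/3) - t2**(-1/3))
assert abs(kick_fac_num  - 3*t0**(2/3)*(t2**(1/3) - t1**(1/3)))   < 1e-8
assert abs(drift_fac_num - 3*t0**(4/3)*(t1**(-1/3) - t2**(-1/3))) < 1e-8
# ---------------------------------------------------------------------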
# As this non-compiled code should work regardless of whether
# the main CO𝘕CEPT code is compiled or not, we need to flood
# this name space with names from commons explicitly, as
# 'from commons import *' does not import C level variables.
commons_flood()

# Initiate the cosmic time and the scale factor,
# and do the call to CLASS if enable_class is True.
initiate_time()

# Array of scale factor values at which to compute the cosmic time
N_points = 50
scale_factors = logspace(log10(a_begin), log10(1), N_points)

# Compute the cosmic time for each value of the scale factor
cosmic_times = [cosmic_time(a) for a in scale_factors]

# Depending on the mode, save the computed cosmic times
compiled = not user_params['_pure_python']
mode = f'class={enable_class}_compiled={compiled}'
np.savetxt(f'{this_dir}/t_{mode}.dat', cosmic_times)

# If all four data files exist, plot and analyze these
data_filenames = glob(f'{this_dir}/*.dat')
if sum(bool(re.search(rf'^{this_dir}/t_class=(True|False)_compiled=(True|False)\.dat$',
                      fname))
       for fname in data_filenames) == 4:
    masterprint('Analyzing {} data ...'.format(this_test))
    # Load in the data
    all_times = {}
    for filename in data_filenames:
        if re.search('class=True', filename):
# As this non-compiled code should work regardless of whether
# the main CO𝘕CEPT code is compiled or not, we need to flood
# this name space with names from commons explicitly, as
# 'from commons import *' does not import C level variables.
commons_flood()

# Initiate the cosmic time and the scale factor,
# and do the call to CLASS if enable_class_background is True.
initiate_time()

# Array of scale factor values at which to compute the cosmic time
N_points = 50
scale_factors = logspace(log10(a_begin), log10(1), N_points)

# Compute the cosmic time for each value of the scale factor
cosmic_times = [cosmic_time(a) for a in scale_factors]

# Depending on the mode, save the computed cosmic times
compiled = not user_params['_pure_python']
mode = f'class={enable_class_background}_compiled={compiled}'
np.savetxt(f'{this_dir}/t_{mode}.dat', cosmic_times)

# If all four data files exist, plot and analyze these
data_filenames = glob(f'{this_dir}/*.dat')
if sum(bool(re.search(rf'^{this_dir}/t_class=(True|False)_compiled=(True|False)\.dat$',
                      fname))
       for fname in data_filenames) == 4:
    masterprint('Analyzing {} data ...'.format(this_test))
    # Load in the data
    all_times = {}
    for filename in data_filenames:
        if re.search('class=True', filename):