    T1.set_scales(domain.dealias, keep_data=True)
    T1['g'] += (0.5 - z_de)  #Add T0
    T1['g'] /= Nu            #Scale Temp flucs properly
    T1['g'] -= (0.5 - z_de)  #Subtract T0
    for vel in vels:
        vel['g'] /= np.sqrt(Nu)
    not_corrected_times = False
else:
    logger.info("restarting from {}".format(restart))
    dt = checkpoint.restart(restart, solver)
    mode = 'append'
    not_corrected_times = False
    Nu = float(args['--restart_Nu'])
    true_t_ff = np.sqrt(Nu)
checkpoint.set_checkpoint(solver, wall_dt=checkpoint_min * 60, mode=mode)

### 7. Set simulation stop parameters, output, and CFL
if run_time_buoy is not None:
    solver.stop_sim_time = run_time_buoy * true_t_ff + solver.sim_time
elif run_time_therm is not None:
    solver.stop_sim_time = run_time_therm / P + solver.sim_time
else:
    solver.stop_sim_time = 1 / P + solver.sim_time
solver.stop_wall_time = run_time_wall * 3600.

max_dt = np.min((0.1 * true_t_ff, 1))
if dt is None:
    dt = max_dt

analysis_tasks = initialize_rotating_output(solver, data_dir, aspect,
checkpoint = Checkpoint(data_dir)
mode = 'overwrite'
if restart is None:
    therm = Thermal(domain, atmosphere, falling=True, radius=radius,
                    r_width=delta_r, A0=epsilon, z_pert=z_pert)
    therm.set_thermal(T1, T1_z, ln_rho1)
else:
    logger.info("restarting from {}".format(restart))
    dt = checkpoint.restart(restart, solver)
    mode = 'append'
checkpoint.set_checkpoint(solver, sim_dt=buoyancy_time, mode=mode)

##############################################
# Outputs and flow tracking
analysis_tasks = initialize_output(data_dir, solver, threeD=threeD,
                                   output_cadence=output_cadence, mode=mode)

# Flow properties
flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
flow.add_property("vol_avg(rho_fluc*phi)", name='PE_fluc')
flow.add_property("Re_rms", name='Re')
flow.add_property("Ma_rms", name='Ma')
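# Hedged sketch, not part of the original script: one way the flow properties
# registered above ('PE_fluc', 'Re', 'Ma') could be reported during the main loop.
# It assumes a `CFL` object and an initial timestep `dt` have been set up
# (not shown in this excerpt), as they are in the other drivers in this collection.
while solver.ok:
    dt = CFL.compute_dt()
    solver.step(dt)
    if solver.iteration % 10 == 0:
        logger.info('iter: {:d}, dt: {:.2e}, Re: {:.2e}, Ma: {:.2e}, PE_fluc: {:.2e}'.format(
                    solver.iteration, dt, flow.max('Re'), flow.max('Ma'),
                    flow.grid_average('PE_fluc')))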
def solve_IVP(self, dt, CFL, data_dir, analysis_tasks, task_args=(), pre_loop_args=(),
              task_kwargs={}, pre_loop_kwargs={}, time_div=None, track_fields=['Pe'],
              threeD=False, Hermitian_cadence=100, no_join=False, mode='append'):
    """Logic for a while-loop that solves an initial value problem.

    Parameters
    ----------
    dt : float
        The initial timestep of the simulation
    CFL : a Dedalus CFL object
        A CFL object that calculates the timestep of the simulation on the fly
    data_dir : string
        The parent directory of output files
    analysis_tasks : OrderedDict()
        An OrderedDict of dedalus FileHandler objects
    task_args, task_kwargs : list, dict, optional
        arguments & keyword arguments to the self.special_tasks() function
    pre_loop_args, pre_loop_kwargs : list, dict, optional
        arguments & keyword arguments to the self.pre_loop_setup() function
    time_div : float, optional
        A simulation time to divide the normal time by for easier output tracking
    threeD : bool, optional
        If True, occasionally force the solution to grid space to remove Hermitian errors
    Hermitian_cadence : int, optional
        The number of timesteps between grid space forcings in 3D
    no_join : bool, optional
        If True, do not join files at the end of the simulation run
    mode : string, optional
        Dedalus output mode for the final checkpoint. "append" or "overwrite"
    """
    # Flow properties
    self.flow = flow_tools.GlobalFlowProperty(self.solver, cadence=1)
    for f in track_fields:
        self.flow.add_property(f, name=f)

    self.pre_loop_setup(*pre_loop_args, **pre_loop_kwargs)

    start_time = time.time()
    # Main loop
    count = 0
    try:
        logger.info('Starting loop')
        init_time = self.solver.sim_time
        start_iter = self.solver.iteration
        while (self.solver.ok):
            dt = CFL.compute_dt()
            self.solver.step(dt) #, trim=True)

            # prevents blow-up over long timescales in 3D due to hermitian-ness
            effective_iter = self.solver.iteration - start_iter
            if threeD and effective_iter % Hermitian_cadence == 0:
                for field in self.solver.state.fields:
                    field.require_grid_space()

            self.special_tasks(*task_args, **task_kwargs)

            # reporting string
            self.iteration_report(dt, track_fields, time_div=time_div)

            if not np.isfinite(self.flow.grid_average(track_fields[0])):
                break
    except:
        logger.error('Exception raised, triggering end of main loop.')
        raise
    finally:
        end_time = time.time()
        main_loop_time = end_time - start_time
        n_iter_loop = self.solver.iteration - 1
        logger.info('Iterations: {:d}'.format(n_iter_loop))
        logger.info('Sim end time: {:f}'.format(self.solver.sim_time))
        logger.info('Run time: {:f} sec'.format(main_loop_time))
        logger.info('Run time: {:f} cpu-hr'.format(main_loop_time / 60 / 60 * self.de_domain.domain.dist.comm_cart.size))
        logger.info('iter/sec: {:f} (main loop only)'.format(n_iter_loop / main_loop_time))
        try:
            final_checkpoint = Checkpoint(data_dir, checkpoint_name='final_checkpoint')
            final_checkpoint.set_checkpoint(self.solver, wall_dt=1, mode=mode)
            self.solver.step(dt) #clean this up in the future...works for now.
            post.merge_process_files(data_dir + '/final_checkpoint/', cleanup=False)
        except:
            print('cannot save final checkpoint')
            raise
        finally:
            if not no_join:
                logger.info('beginning join operation')
                post.merge_analysis(data_dir + 'checkpoint')
                for key, task in analysis_tasks.items():
                    logger.info(task.base_path)
                    post.merge_analysis(task.base_path)
            logger.info(40 * "=")
            logger.info('Iterations: {:d}'.format(n_iter_loop))
            logger.info('Sim end time: {:f}'.format(self.solver.sim_time))
            logger.info('Run time: {:f} sec'.format(main_loop_time))
            logger.info('Run time: {:f} cpu-hr'.format(main_loop_time / 60 / 60 * self.de_domain.domain.dist.comm_cart.size))
            logger.info('iter/sec: {:f} (main loop only)'.format(n_iter_loop / main_loop_time))
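# Hedged usage sketch (not from the original module): one way solve_IVP might be
# driven from a run script. `runner` stands in for whatever object owns this method,
# and the CFL parameters and track_fields values below are illustrative assumptions.
#
#   from dedalus.extras import flow_tools
#   CFL = flow_tools.CFL(solver, initial_dt=dt, cadence=1, safety=0.2,
#                        max_change=1.5, min_change=0.5, max_dt=max_dt, threshold=0.1)
#   CFL.add_velocities(('u', 'w'))
#   runner.solve_IVP(dt, CFL, data_dir, analysis_tasks, track_fields=['Re'], mode='append')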
    h2 = 1.0*np.sin(15*np.pi*z_de) - 1.5*np.sin(12*np.pi*z_de) + 0.7*np.sin(18*np.pi*z_de)
    T1 = solver.state['T1']
    T1_z = solver.state['T1_z']
    T1.set_scales(domain.dealias)
    T1['g'] = 2e-3*(f1*g1*h1 + f2*g2*h2)
    T1.differentiate('z', out=T1_z)
    dt = None
    mode = 'overwrite'
else:
    logger.info("restarting from {}".format(restart))
    dt = checkpoint.restart(restart, solver)
    mode = 'append'
    not_corrected_times = False
checkpoint.set_checkpoint(solver, sim_dt=checkpoint_dt, mode=mode)

### 7. Set simulation stop parameters, output, and CFL
if run_time_buoy is not None:
    solver.stop_sim_time = run_time_buoy*t_buoy + solver.sim_time
elif run_time_diff is not None:
    solver.stop_sim_time = run_time_diff*t_diff + solver.sim_time
else:
    solver.stop_sim_time = 1 + solver.sim_time
solver.stop_wall_time = run_time_wall*3600.

#TODO: Check max_dt, cfl, etc.
max_dt = np.min((1e-1, t_diff, t_buoy))
if dt is None:
    dt = max_dt

analysis_tasks = initialize_output(solver, domain, data_dir, mode=mode,
                                   magnetic=False, threeD=False)

# CFL
CFL = flow_tools.CFL(solver, initial_dt=dt, cadence=1, safety=cfl_safety,
try:
    while solver.ok:
        dt = CFL.compute_dt()
        dt = solver.step(dt)
        if (solver.iteration - 1) % 1 == 0:
            logger.info('Iteration: {:.2e}, Time: {:.2e}, dt: {:.2e}, '.format(solver.iteration, solver.sim_time, dt) +
                        'Max Re = {:.2e}, Circ = {:.2e}'.format(flow.max('Re'), flow.max('circ')))
        if np.isnan(flow.max('v_rms')):
            logger.info('NaN, breaking.')
            break
except:
    logger.error('Exception raised, triggering end of main loop.')
    raise
finally:
    final_checkpoint = Checkpoint(data_dir, checkpoint_name='final_checkpoint')
    final_checkpoint.set_checkpoint(solver, wall_dt=1, mode="append")
    solver.step(dt / 1000) #clean this up in the future...works for now.
    for t in [checkpoint, final_checkpoint]:
        post.merge_process_files(t.checkpoint_dir, cleanup=False)
    for t in [slices, profiles, scalars]:
        post.merge_process_files(t.base_path, cleanup=False)
    end_time = time.time()
    logger.info('Iterations: %i' % solver.iteration)
    logger.info('Sim end time: %f' % solver.sim_time)
    logger.info('Run time: %.2f sec' % (end_time - start_time))
    logger.info('Iter/sec: %.2f ' % (solver.iteration / (end_time - start_time)))
    logger.info('Run time: %f cpu-hr' % ((end_time - start_time) / 60 / 60 * domain.dist.comm_cart.size))
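# Hedged sketch, not shown in this excerpt: the loop above assumes flow properties
# named 'Re', 'circ', and 'v_rms' were registered on a GlobalFlowProperty before the
# loop started. The registration would look something like the following; the exact
# expression strings are assumptions, since the real driver defines its own.
#
#   from dedalus.extras import flow_tools
#   flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
#   flow.add_property('sqrt(u*u + w*w)*Lz/nu', name='Re')     # assumed Reynolds number
#   flow.add_property('sqrt(u*u + w*w)', name='v_rms')        # assumed rms velocity
#   flow.add_property('integ(dx(w) - dz(u))', name='circ')    # assumed circulation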