Ejemplo n.º 1
0
def test_1d_output(x_basis_class, Nx, timestepper, dtype):
    """Check 1D IVP file output against the analytic diffusion solution.

    Solves du/dt = d2u/dx2 - F with F = -sin(x) on a periodic interval,
    writes the solution every iteration, then verifies the merged HDF5
    output matches u(x, t) = (1 - e^{-t}) sin(x).
    """
    # Periodic domain on [0, 2*pi).
    x_basis = x_basis_class('x', Nx, interval=(0, 2 * np.pi))
    domain = de.Domain([x_basis], grid_dtype=dtype)
    # Sinusoidal forcing field.
    x = domain.grid(0)
    F = domain.new_field(name='F')
    F['g'] = -np.sin(x)
    # First-order formulation with periodic matching conditions.
    problem = de.IVP(domain, variables=['u', 'ux'])
    problem.parameters['F'] = F
    problem.add_equation("ux - dx(u) = 0")
    problem.add_equation("-dt(u) + dx(ux) = F")
    problem.add_bc("left(u) - right(u) = 0")
    problem.add_bc("left(ux) - right(ux) = 0")
    solver = problem.build_solver(timestepper)
    # Write the grid-space solution on every iteration.
    handler = solver.evaluator.add_file_handler('test_output', iter=1)
    handler.add_task('u', layout='g', name='ug')
    # Take a handful of small steps.
    dt = 1e-5
    for _ in range(10):
        solver.step(dt)
    # Merge per-process files, load the result, and clean up.
    post.merge_process_files('test_output')
    with h5py.File('test_output/test_output_s1.h5', mode='r') as file:
        ug = file['tasks']['ug'][:]
        t = file['scales']['sim_time'][:]
    shutil.rmtree('test_output')
    # Analytic solution: amplitude envelope times sin(x).
    envelope = 1 - np.exp(-t[:, None])
    assert np.allclose(ug, envelope * np.sin(x[None, :]))
Ejemplo n.º 2
0
    def write_global_domain(self, field_system, base_name="IVP_output"):
        """Write *field_system* out as a single merged Dedalus HDF5 file.

        Typically used to dump a field system constructed by project_mode.

        Parameters
        ----------
        field_system : dedalus.core.system.FieldSystem
            A field system containing the data to be written
        base_name : str, optional
            The base filename of the resulting HDF5 file. (default: IVP_output)

        """
        # Stand up a one-shot evaluator/handler pair for this system.
        evaluator = Evaluator(field_system.domain, self.EVP.namespace)
        handler = evaluator.add_file_handler(base_name)
        handler.add_system(field_system)

        # Trigger a single write at t = 0 / iteration 0.
        evaluator.evaluate_handlers(evaluator.handlers,
                                    timestep=0,
                                    sim_time=0,
                                    world_time=0,
                                    wall_time=0,
                                    iteration=0)

        # Collapse the per-process files into one output file.
        merge_process_files(base_name,
                            cleanup=True,
                            comm=evaluator.domain.distributor.comm)
Ejemplo n.º 3
0
    def merge_results(self, label, full_merge=False, force=False):
        """Merge the different process result files together.

        Parameters:
            label (str): The name of the subdirectory containing folders where
                each process computes results. For example, if label="states",
                then self.records_dir/states/ should exist and contain at least
                one subfolder named states_s1/ (or similar), which in turn
                contains .h5 files from each process (states_s1_p0.h5, etc.).
            full_merge (bool): If true, merge the process files AND merge
                the resulting files into one large file. For example,
                states_s1_p0.h5 and states_s1_p1.h5 are merged into
                states_s1.h5 like usual, and then states_s1.h5 and states_s2.h5
                are merged into one large states.h5.
            force (bool): If true, merge the files even if there is an existing
                merged file.
        """
        # The relevant folder must already exist.
        subdir = os.path.join(self.records_dir, label)
        if not os.path.isdir(subdir):
            raise NotADirectoryError(subdir)

        # Determine whether any merging is actually required: either the
        # final combined file is absent (full merge), or some set folder
        # lacks its corresponding merged .h5 file.
        if full_merge:
            needs_merge = not os.path.isfile(
                os.path.join(subdir, label + ".h5"))
        else:
            needs_merge = any(
                os.path.isdir(os.path.join(subdir, entry))
                and not os.path.isfile(os.path.join(subdir, entry) + ".h5")
                for entry in os.listdir(subdir))

        if not (needs_merge or force):
            return
        self.logger.info("Merging {} files...".format(label))
        post.merge_process_files(subdir,
                                 cleanup=False,
                                 comm=MPI.COMM_WORLD)
        if full_merge:
            # Wait for other processes to finish the per-set merges.
            MPI.COMM_WORLD.Barrier()
            # Combine the per-set files into one large file.
            set_paths = glob(os.path.join(subdir, label + "_s*.h5"))
            post.merge_sets(os.path.join(subdir, label + ".h5"),
                            set_paths,
                            cleanup=True,
                            comm=MPI.COMM_WORLD)
        self.logger.info("\t{} files now {}merged".format(
            label, "fully " if full_merge else ""))
Ejemplo n.º 4
0
def merge_move(rundir, outdir):
    """Merge per-process Dedalus output files under *rundir*.

    For each handler directory ("snapshots", "ifields", "moments"), the
    per-process files are merged into per-set files, and the sets are then
    merged into a single <name>/<name>.h5 file.

    Parameters
    ----------
    rundir : str
        Run directory prefix. NOTE: it is concatenated directly with the
        handler name, so it must end with a path separator.
    outdir : str
        Currently unused; kept for interface compatibility with callers.
    """
    # The three handler directories share identical merge logic; loop
    # instead of repeating the same three statements per handler.
    for name in ("snapshots", "ifields", "moments"):
        base = rundir + name
        # Merge files written by each MPI process into per-set files.
        post.merge_process_files(base, cleanup=True)
        set_paths = list(pathlib.Path(base).glob(name + "_s*.h5"))
        # Merge the sets into one file per handler.
        post.merge_sets(base + "/" + name + ".h5", set_paths, cleanup=True)
Ejemplo n.º 5
0
# NOTE(review): fragment — the opening `try:` (and the logger.info calls that
# lines 1-3 below close) lies outside this excerpt; code left byte-identical.
                solver.pencils[0].LHS.tocsc().nnz))
            logger.info("{} fill in factor".format(
                LU.nnz / solver.pencils[0].LHS.tocsc().nnz))

except:
    # Log, then re-raise so the traceback propagates; the finally block
    # below still performs checkpointing and file merging.
    logger.error('Exception raised, triggering end of main loop.')
    raise
finally:
    # Always write a final checkpoint, even after a failure.
    final_checkpoint = Checkpoint(data_dir, checkpoint_name='final_checkpoint')
    final_checkpoint.set_checkpoint(solver, wall_dt=1, mode="append")
    # Tiny extra step forces the checkpoint handler to actually write.
    solver.step(dt / 100)  #clean this up in the future...works for now.

    if args['--join']:
        # Merge per-process output files for checkpoints and all handlers.
        logger.info('beginning join operation')
        logger.info(data_dir + '/final_checkpoint/')
        post.merge_process_files(data_dir + '/final_checkpoint/',
                                 cleanup=False)
        logger.info(data_dir + '/checkpoint/')
        post.merge_process_files(data_dir + '/checkpoint/', cleanup=False)
        for task in analysis_tasks.keys():
            logger.info(analysis_tasks[task].base_path)
            post.merge_process_files(analysis_tasks[task].base_path,
                                     cleanup=False)

    # Final run statistics.
    end_time = time.time()
    logger.info('Iterations: %i' % solver.iteration)
    logger.info('Sim end time: %f' % solver.sim_time)
    logger.info('Run time: %.2f sec' % (end_time - start_time))
    logger.info('Run time: %f cpu-hr' % (
        (end_time - start_time) / 60 / 60 * domain.domain.dist.comm_cart.size))
    logger.info('Iter/sec: {:g}'.format(solver.iteration /
                                        (end_time - start_time)))
Ejemplo n.º 6
0
# Analysis handler
# Write output every 5 iterations, capping each set file at 100 writes.
analysis = solver.evaluator.add_file_handler('analysis', iter=5, max_writes=100)

# Track the x-integrals of u and u**2 (grid layout).
analysis.add_task("integ(u,'x')", layout='g', name='<u>')
analysis.add_task("integ(u**2,'x')", layout='g', name='<uu>')

# Also dump the full solver state in grid space.
analysis.add_system(solver.state, layout='g')

# Simulation
import time

# Main loop
dt = 1e-2  # fixed timestep
start_time = time.time()
while solver.ok:
    solver.step(dt)
    if solver.iteration % 100 == 0:
        print('Completed iteration {}'.format(solver.iteration))

end_time = time.time()
print('Runtime:', end_time-start_time)

# Merge process files
from dedalus.tools import post
post.merge_process_files("analysis", cleanup=True)

# Merge files into one
import pathlib
set_paths = list(pathlib.Path("analysis").glob("analysis_s*.h5"))
post.merge_sets("analysis/1d_kdv_analysis.h5", set_paths, cleanup=True)
Ejemplo n.º 7
0
# Merge the per-process output files for each analysis handler directory.
from dedalus.tools import post

for handler_dir in ("snapshots", "profiles", "series"):
    post.merge_process_files(handler_dir, cleanup=True)

Ejemplo n.º 8
0
# ***Only need the LAST checkpoint folder - otherwise, merging the data
# will take approximately forever***


from dedalus.tools import post
# The "slices" and "dump" merges are intentionally disabled; only the
# checkpoint data is merged here (see the note above about merge cost).
# post.merge_process_files("slices", cleanup=True)
# post.merge_process_files("dump",cleanup=True)
post.merge_process_files("checkpoint",cleanup=True)

import subprocess
# List the merged checkpoint tree for a quick sanity check of the output.
# print(subprocess.check_output("find slices", shell=True).decode())
# print(subprocess.check_output("find dump", shell=True).decode())
print(subprocess.check_output("find checkpoint", shell=True).decode())



import pathlib
# Combine the per-set checkpoint files into one checkpoint.h5.
# set_paths=list(pathlib.Path("slices").glob("slices_s*.h5"))
# post.merge_sets("slices/slices.h5",set_paths,cleanup=True)
# set_paths=list(pathlib.Path("dump").glob("dump_s*.h5"))
# post.merge_sets("dump/dump.h5",set_paths,cleanup=True)
set_paths=list(pathlib.Path("checkpoint").glob("checkpoint_s*.h5"))
post.merge_sets("checkpoint/checkpoint.h5",set_paths,cleanup=True)



# Rename folders
# Keep the raw (pre-merge) folder around under a new name.
import os
# os.rename("dump", "dump_old")
# os.rename("slices", "slices_old")
os.rename("checkpoint", "checkpoint_old")
    def solve_IVP(self,
                  dt,
                  CFL,
                  data_dir,
                  analysis_tasks,
                  task_args=(),
                  pre_loop_args=(),
                  task_kwargs=None,
                  pre_loop_kwargs=None,
                  time_div=None,
                  track_fields=('Pe',),
                  threeD=False,
                  Hermitian_cadence=100,
                  no_join=False,
                  mode='append'):
        """Logic for a while-loop that solves an initial value problem.

        Parameters
        ----------
        dt                  : float
            The initial timestep of the simulation
        CFL                 : a Dedalus CFL object
            A CFL object that calculates the timestep of the simulation on the fly
        data_dir            : string
            The parent directory of output files
        analysis_tasks      : OrderedDict()
            An OrderedDict of dedalus FileHandler objects
        task_args, task_kwargs : list, dict, optional
            arguments & keyword arguments to the self.special_tasks() function
        pre_loop_args, pre_loop_kwargs: list, dict, optional
            arguments & keyword arguments to the self.pre_loop_setup() function
        time_div            : float, optional
            A simulation time to divide the normal time by for easier output tracking
        track_fields        : iterable of str, optional
            Flow properties to track; the first entry is also used as the
            finiteness (blow-up) check.
        threeD              : bool, optional
            If True, occasionally force the solution to grid space to remove Hermitian errors
        Hermitian_cadence   : int, optional
            The number of timesteps between grid space forcings in 3D.
        no_join             : bool, optional
            If True, do not join files at the end of the simulation run.
        mode                : string, optional
            Dedalus output mode for final checkpoint. "append" or "overwrite"
        """
        # FIX: avoid mutable default arguments ({} shared across calls).
        task_kwargs = {} if task_kwargs is None else task_kwargs
        pre_loop_kwargs = {} if pre_loop_kwargs is None else pre_loop_kwargs

        # Flow properties
        self.flow = flow_tools.GlobalFlowProperty(self.solver, cadence=1)
        for f in track_fields:
            self.flow.add_property(f, name=f)

        self.pre_loop_setup(*pre_loop_args, **pre_loop_kwargs)

        start_time = time.time()
        # Main loop
        try:
            logger.info('Starting loop')
            start_iter = self.solver.iteration
            while (self.solver.ok):
                dt = CFL.compute_dt()
                self.solver.step(dt)

                # prevents blow-up over long timescales in 3D due to hermitian-ness
                effective_iter = self.solver.iteration - start_iter
                if threeD and effective_iter % Hermitian_cadence == 0:
                    for field in self.solver.state.fields:
                        field.require_grid_space()

                self.special_tasks(*task_args, **task_kwargs)

                #reporting string
                self.iteration_report(dt, track_fields, time_div=time_div)

                # Stop if the tracked quantity has gone non-finite (blow-up).
                if not np.isfinite(self.flow.grid_average(track_fields[0])):
                    break
        except:
            # FIX: log BEFORE re-raising; the original order put the raise
            # first, making the log line unreachable.
            logger.error('Exception raised, triggering end of main loop.')
            raise
        finally:
            end_time = time.time()
            main_loop_time = end_time - start_time
            n_iter_loop = self.solver.iteration - 1
            logger.info('Iterations: {:d}'.format(n_iter_loop))
            logger.info('Sim end time: {:f}'.format(self.solver.sim_time))
            logger.info('Run time: {:f} sec'.format(main_loop_time))
            logger.info('Run time: {:f} cpu-hr'.format(
                main_loop_time / 60 / 60 *
                self.de_domain.domain.dist.comm_cart.size))
            logger.info('iter/sec: {:f} (main loop only)'.format(
                n_iter_loop / main_loop_time))
            try:
                final_checkpoint = Checkpoint(
                    data_dir, checkpoint_name='final_checkpoint')
                final_checkpoint.set_checkpoint(self.solver,
                                                wall_dt=1,
                                                mode=mode)
                # Extra step forces the checkpoint handler to write.
                self.solver.step(
                    dt)  #clean this up in the future...works for now.
                post.merge_process_files(data_dir + '/final_checkpoint/',
                                         cleanup=False)
            except:
                # FIX: report BEFORE re-raising; the original raise came
                # first, making this message unreachable.
                print('cannot save final checkpoint')
                raise
            finally:
                if not no_join:
                    logger.info('beginning join operation')
                    post.merge_analysis(data_dir + 'checkpoint')

                    for key, task in analysis_tasks.items():
                        logger.info(task.base_path)
                        post.merge_analysis(task.base_path)

                logger.info(40 * "=")
                logger.info('Iterations: {:d}'.format(n_iter_loop))
                logger.info('Sim end time: {:f}'.format(self.solver.sim_time))
                logger.info('Run time: {:f} sec'.format(main_loop_time))
                logger.info('Run time: {:f} cpu-hr'.format(
                    main_loop_time / 60 / 60 *
                    self.de_domain.domain.dist.comm_cart.size))
                logger.info('iter/sec: {:f} (main loop only)'.format(
                    n_iter_loop / main_loop_time))
    # NOTE(review): fragment — these lines sit inside a function whose `def`
    # is outside this excerpt (they reference local `solver`, `args`, `dt`,
    # `start_time`); code left byte-identical.
    end_time = time.time()
    main_loop_time = end_time - start_time
    n_iter_loop = solver.iteration - 1
    logger.info('Iterations: {:d}'.format(n_iter_loop))
    logger.info('Sim end time: {:f}'.format(solver.sim_time))
    logger.info('Run time: {:f} sec'.format(main_loop_time))
    logger.info('Run time: {:f} cpu-hr'.format(main_loop_time / 60 / 60 *
                                               domain.dist.comm_cart.size))
    logger.info('iter/sec: {:f} (main loop only)'.format(n_iter_loop /
                                                         main_loop_time))
    try:
        # Write a final checkpoint; the extra step forces the handler to write.
        final_checkpoint = Checkpoint(data_dir,
                                      checkpoint_name='final_checkpoint')
        final_checkpoint.set_checkpoint(solver, wall_dt=1, mode=mode)
        solver.step(dt)  #clean this up in the future...works for now.
        post.merge_process_files(data_dir + '/final_checkpoint/',
                                 cleanup=False)
    except:
        # NOTE(review): the print below is unreachable — `raise` executes
        # first. Left untouched because this fragment cannot be safely edited.
        raise
        print('cannot save final checkpoint')
    finally:
        if not args['--no_join']:
            # Merge checkpoint and analysis-handler output files.
            logger.info('beginning join operation')
            post.merge_analysis(data_dir + 'checkpoint')

            for key, task in analysis_tasks.items():
                logger.info(task.base_path)
                post.merge_analysis(task.base_path)

        logger.info(40 * "=")
        logger.info('Iterations: {:d}'.format(n_iter_loop))
        logger.info('Sim end time: {:f}'.format(solver.sim_time))
Ejemplo n.º 11
0
def FC_polytrope(Rayleigh=1e4,
                 Prandtl=1,
                 aspect_ratio=4,
                 Taylor=None,
                 theta=0,
                 nz=128,
                 nx=None,
                 ny=None,
                 threeD=False,
                 mesh=None,
                 n_rho_cz=3,
                 epsilon=1e-4,
                 gamma=5 / 3,
                 run_time=23.5,
                 run_time_buoyancies=None,
                 run_time_iter=np.inf,
                 fixed_T=False,
                 fixed_flux=False,
                 mixed_flux_T=False,
                 const_mu=True,
                 const_kappa=True,
                 dynamic_diffusivities=False,
                 split_diffusivities=False,
                 restart=None,
                 start_new_files=False,
                 rk222=False,
                 safety_factor=0.2,
                 max_writes=20,
                 no_slip=False,
                 data_dir='./',
                 out_cadence=0.1,
                 no_coeffs=False,
                 no_volumes=False,
                 no_join=False,
                 verbose=False):
    """Set up and run a fully-compressible polytrope convection IVP.

    Builds the atmosphere (2D or 3D), sets boundary conditions, runs the
    CFL-limited main loop with logging, and finally checkpoints and merges
    the per-process output files.

    NOTE(review): `fixed_T` is accepted but never read — fixed-temperature
    BCs are the fall-through default when neither `fixed_flux` nor
    `mixed_flux_T` is set; kept for interface compatibility.
    """
    import dedalus.public as de
    from dedalus.tools import post
    from dedalus.extras import flow_tools

    import time
    import os
    import sys
    from stratified_dynamics import polytropes
    from tools.checkpointing import Checkpoint

    checkpoint_min = 30  # wall-clock minutes between routine checkpoints

    initial_time = time.time()

    logger.info("Starting Dedalus script {:s}".format(sys.argv[0]))

    # Derive horizontal resolutions from nz/aspect ratio when not given.
    if nx is None:
        nx = int(np.round(nz * aspect_ratio))
    if threeD and ny is None:
        ny = nx

    # Build the appropriate atmosphere class.
    if threeD:
        atmosphere = polytropes.FC_polytrope_3d(nx=nx, ny=ny, nz=nz, mesh=mesh, constant_kappa=const_kappa, constant_mu=const_mu,\
                                        epsilon=epsilon, gamma=gamma, n_rho_cz=n_rho_cz, aspect_ratio=aspect_ratio,\
                                        fig_dir=data_dir)
    else:
        if dynamic_diffusivities:
            atmosphere = polytropes.FC_polytrope_2d_kappa_mu(nx=nx, nz=nz, constant_kappa=const_kappa, constant_mu=const_mu,\
                                        epsilon=epsilon, gamma=gamma, n_rho_cz=n_rho_cz, aspect_ratio=aspect_ratio,\
                                        fig_dir=data_dir)
        else:
            atmosphere = polytropes.FC_polytrope_2d(nx=nx, nz=nz, constant_kappa=const_kappa, constant_mu=const_mu,\
                                        epsilon=epsilon, gamma=gamma, n_rho_cz=n_rho_cz, aspect_ratio=aspect_ratio,\
                                        fig_dir=data_dir)
    # Tighter NCC cutoff for smaller stratification perturbations.
    if epsilon < 1e-4:
        ncc_cutoff = 1e-14
    elif epsilon > 1e-1:
        ncc_cutoff = 1e-6
    else:
        ncc_cutoff = 1e-10

    if threeD:
        atmosphere.set_IVP_problem(Rayleigh,
                                   Prandtl,
                                   Taylor=Taylor,
                                   theta=theta,
                                   ncc_cutoff=ncc_cutoff,
                                   split_diffusivities=split_diffusivities)
    else:
        atmosphere.set_IVP_problem(Rayleigh,
                                   Prandtl,
                                   ncc_cutoff=ncc_cutoff,
                                   split_diffusivities=split_diffusivities)

    # Boundary conditions: velocity (no-slip vs stress-free) and thermal
    # (fixed flux / mixed / fixed temperature as the default).
    bc_dict = {
        'stress_free': False,
        'no_slip': False,
        'fixed_flux': False,
        'mixed_flux_temperature': False,
        'fixed_temperature': False
    }
    if no_slip:
        bc_dict['no_slip'] = True
    else:
        bc_dict['stress_free'] = True

    if fixed_flux:
        bc_dict['fixed_flux'] = True
    elif mixed_flux_T:
        bc_dict['mixed_flux_temperature'] = True
    else:
        bc_dict['fixed_temperature'] = True
    atmosphere.set_BC(**bc_dict)

    problem = atmosphere.get_problem()

    # Only rank 0 creates the output directory.
    if atmosphere.domain.distributor.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.mkdir('{:s}/'.format(data_dir))

    # Timestepper choice; the CFL safety factor scales with the scheme.
    if rk222:
        logger.info("timestepping using RK222")
        ts = de.timesteppers.RK222
        cfl_safety_factor = safety_factor * 2
    else:
        logger.info("timestepping using RK443")
        ts = de.timesteppers.RK443
        cfl_safety_factor = safety_factor * 4

    # Build solver
    solver = problem.build_solver(ts)

    #Check atmosphere
    logger.info("thermal_time = {:g}, top_thermal_time = {:g}".format(atmosphere.thermal_time,\
                                                                    atmosphere.top_thermal_time))
    logger.info("full atm HS check")
    atmosphere.check_atmosphere(make_plots=False,
                                rho=atmosphere.get_full_rho(solver),
                                T=atmosphere.get_full_T(solver))

    if restart is None or start_new_files:
        mode = "overwrite"
    else:
        mode = "append"

    logger.info('checkpointing in {}'.format(data_dir))
    checkpoint = Checkpoint(data_dir)

    # Either set fresh initial conditions or restart from a checkpoint.
    if restart is None:
        atmosphere.set_IC(solver)
        dt = None
    else:
        logger.info("restarting from {}".format(restart))
        dt = checkpoint.restart(restart, solver)

    checkpoint.set_checkpoint(solver, wall_dt=checkpoint_min * 60, mode=mode)

    # Stopping criteria: sim time, iteration count, and wall time.
    # FIX: `is not None` instead of `!= None` (identity check for None).
    if run_time_buoyancies is not None:
        solver.stop_sim_time = solver.sim_time + run_time_buoyancies * atmosphere.buoyancy_time
    else:
        solver.stop_sim_time = 100 * atmosphere.thermal_time

    solver.stop_iteration = solver.iteration + run_time_iter
    solver.stop_wall_time = run_time * 3600
    report_cadence = 1
    output_time_cadence = out_cadence * atmosphere.buoyancy_time
    Hermitian_cadence = 100

    logger.info("stopping after {:g} time units".format(solver.stop_sim_time))
    logger.info("output cadence = {:g}".format(output_time_cadence))

    if threeD:
        analysis_tasks = atmosphere.initialize_output(
            solver,
            data_dir,
            sim_dt=output_time_cadence,
            coeffs_output=not (no_coeffs),
            mode=mode,
            max_writes=max_writes,
            volumes_output=not (no_volumes))
    else:
        analysis_tasks = atmosphere.initialize_output(
            solver,
            data_dir,
            sim_dt=output_time_cadence,
            coeffs_output=not (no_coeffs),
            mode=mode,
            max_writes=max_writes)

    #Set up timestep defaults
    max_dt = output_time_cadence
    if dt is None: dt = max_dt

    cfl_cadence = 1
    cfl_threshold = 0.1
    CFL = flow_tools.CFL(solver,
                         initial_dt=dt,
                         cadence=cfl_cadence,
                         safety=cfl_safety_factor,
                         max_change=1.5,
                         min_change=0.5,
                         max_dt=max_dt,
                         threshold=cfl_threshold)
    if threeD:
        CFL.add_velocities(('u', 'v', 'w'))
    else:
        CFL.add_velocities(('u', 'w'))

    # Flow properties
    flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
    flow.add_property("Re_rms", name='Re')
    if verbose:
        flow.add_property("Pe_rms", name='Pe')
        flow.add_property("Nusselt_AB17", name='Nusselt')

    start_iter = solver.iteration
    start_sim_time = solver.sim_time

    try:
        start_time = time.time()
        # FIX: removed duplicate `start_iter = solver.iteration` — it was
        # already assigned just above the try block with the same value.
        logger.info('starting main loop')
        good_solution = True
        first_step = True
        while solver.ok and good_solution:
            dt = CFL.compute_dt()
            # advance
            solver.step(dt)

            effective_iter = solver.iteration - start_iter
            Re_avg = flow.grid_average('Re')

            # Remove accumulated Hermitian-symmetry errors in 3D.
            if threeD and effective_iter % Hermitian_cadence == 0:
                for field in solver.state.fields:
                    field.require_grid_space()

            # update lists
            if effective_iter % report_cadence == 0:
                log_string = 'Iteration: {:5d}, Time: {:8.3e} ({:8.3e}), dt: {:8.3e}, '.format(
                    solver.iteration - start_iter, solver.sim_time,
                    (solver.sim_time - start_sim_time) /
                    atmosphere.buoyancy_time, dt)
                if verbose:
                    log_string += '\n\t\tRe: {:8.5e}/{:8.5e}'.format(
                        Re_avg, flow.max('Re'))
                    log_string += '; Pe: {:8.5e}/{:8.5e}'.format(
                        flow.grid_average('Pe'), flow.max('Pe'))
                    log_string += '; Nu: {:8.5e}/{:8.5e}'.format(
                        flow.grid_average('Nusselt'), flow.max('Nusselt'))
                else:
                    log_string += 'Re: {:8.3e}/{:8.3e}'.format(
                        Re_avg, flow.max('Re'))
                logger.info(log_string)

            # Stop cleanly on a non-finite Reynolds number (blow-up).
            if not np.isfinite(Re_avg):
                good_solution = False
                logger.info(
                    "Terminating run.  Trapped on Reynolds = {}".format(
                        Re_avg))

            if first_step:
                if verbose:
                    # One-time diagnostics: sparsity patterns of the LHS
                    # matrix and its LU factors.
                    import matplotlib
                    matplotlib.use('Agg')
                    import matplotlib.pyplot as plt
                    fig = plt.figure()
                    ax = fig.add_subplot(1, 1, 1)
                    ax.spy(solver.pencils[0].L,
                           markersize=1,
                           markeredgewidth=0.0)
                    fig.savefig(data_dir + "sparsity_pattern.png", dpi=1200)

                    import scipy.sparse.linalg as sla
                    LU = sla.splu(solver.pencils[0].LHS.tocsc(),
                                  permc_spec='NATURAL')
                    fig = plt.figure()
                    ax = fig.add_subplot(1, 2, 1)
                    ax.spy(LU.L.A, markersize=1, markeredgewidth=0.0)
                    ax = fig.add_subplot(1, 2, 2)
                    ax.spy(LU.U.A, markersize=1, markeredgewidth=0.0)
                    fig.savefig(data_dir + "sparsity_pattern_LU.png", dpi=1200)

                    logger.info("{} nonzero entries in LU".format(LU.nnz))
                    logger.info("{} nonzero entries in LHS".format(
                        solver.pencils[0].LHS.tocsc().nnz))
                    logger.info("{} fill in factor".format(
                        LU.nnz / solver.pencils[0].LHS.tocsc().nnz))
                first_step = False
                # Restart the timer so startup cost is excluded from stats.
                start_time = time.time()
    # FIX: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are not silently swallowed; genuine errors are still logged and the
    # finally block below still merges output.
    except Exception:
        logger.error('Exception raised, triggering end of main loop.')
    finally:
        end_time = time.time()

        # Print statistics
        elapsed_time = end_time - start_time
        elapsed_sim_time = solver.sim_time
        N_iterations = solver.iteration - 1
        logger.info('main loop time: {:e}'.format(elapsed_time))
        logger.info('Iterations: {:d}'.format(N_iterations))
        logger.info('iter/sec: {:g}'.format(N_iterations / (elapsed_time)))
        if N_iterations > 0:
            logger.info('Average timestep: {:e}'.format(elapsed_sim_time /
                                                        N_iterations))

        if not no_join:
            logger.info('beginning join operation')
            try:
                final_checkpoint = Checkpoint(
                    data_dir, checkpoint_name='final_checkpoint')
                final_checkpoint.set_checkpoint(solver,
                                                wall_dt=1,
                                                mode="append")
                # Extra step forces the checkpoint handler to write.
                solver.step(dt)
                post.merge_process_files(data_dir + '/final_checkpoint/',
                                         cleanup=False)
            # FIX: narrowed from bare `except:`; this is deliberately
            # best-effort — a failed final checkpoint should not stop the
            # remaining merges below.
            except Exception:
                print('cannot save final checkpoint')

            logger.info(data_dir + '/checkpoint/')
            post.merge_process_files(data_dir + '/checkpoint/', cleanup=False)

            for task in analysis_tasks.keys():
                logger.info(analysis_tasks[task].base_path)
                post.merge_process_files(analysis_tasks[task].base_path,
                                         cleanup=False)

        if (atmosphere.domain.distributor.rank == 0):

            logger.info('main loop time: {:e}'.format(elapsed_time))
            if start_iter > 1:
                logger.info('Iterations (this run): {:d}'.format(N_iterations -
                                                                 start_iter))
                # FIX: copy-paste bug — "total" previously repeated the
                # "this run" arithmetic (N_iterations - start_iter).
                logger.info('Iterations (total): {:d}'.format(N_iterations))
            logger.info('iter/sec: {:g}'.format(N_iterations / (elapsed_time)))
            if N_iterations > 0:
                logger.info('Average timestep: {:e}'.format(elapsed_sim_time /
                                                            N_iterations))

            N_TOTAL_CPU = atmosphere.domain.distributor.comm_cart.size

            # Print statistics
            print('-' * 40)
            total_time = end_time - initial_time
            main_loop_time = end_time - start_time
            startup_time = start_time - initial_time
            n_steps = solver.iteration - 1
            print('  startup time:', startup_time)
            print('main loop time:', main_loop_time)
            print('    total time:', total_time)
            if n_steps > 0:
                print('    iterations:', n_steps)
                print(' loop sec/iter:', main_loop_time / n_steps)
                print('    average dt:', solver.sim_time / n_steps)
                print(
                    "          N_cores, Nx, Nz, startup     main loop,   main loop/iter, main loop/iter/grid, n_cores*main loop/iter/grid"
                )
                print(
                    'scaling:', ' {:d} {:d} {:d}'.format(N_TOTAL_CPU, nx, nz),
                    ' {:8.3g} {:8.3g} {:8.3g} {:8.3g} {:8.3g}'.format(
                        startup_time, main_loop_time, main_loop_time / n_steps,
                        main_loop_time / n_steps / (nx * nz),
                        N_TOTAL_CPU * main_loop_time / n_steps / (nx * nz)))
            print('-' * 40)
Ejemplo n.º 12
0
# Main loop: CFL-limited timestepping with per-iteration logging and a
# NaN check; a final checkpoint and file merge always run afterwards.
try:
    while solver.ok:
        dt = CFL.compute_dt()
        dt = solver.step(dt)
        if (solver.iteration - 1) % 1 == 0:
            # FIX: the concatenated message previously had no separator
            # between the "dt:" value and "Max Re", producing output like
            # "dt: 1.00e-05Max Re = ...".
            logger.info('Iteration: {:.2e}, Time: {:.2e}, dt: {:.2e}, '.format(solver.iteration, solver.sim_time, dt) +\
                        'Max Re = {:.2e}, Circ = {:.2e}'.format(flow.max('Re'), flow.max('circ')))
        # Stop on blow-up.
        if np.isnan(flow.max('v_rms')):
            logger.info('NaN, breaking.')
            break
except:
    # Log, then re-raise so the traceback propagates; the finally block
    # below still performs checkpointing and output merging.
    logger.error('Exception raised, triggering end of main loop.')
    raise
finally:
    final_checkpoint = Checkpoint(data_dir, checkpoint_name='final_checkpoint')
    final_checkpoint.set_checkpoint(solver, wall_dt=1, mode="append")
    # Tiny step forces the checkpoint handler to write.
    solver.step(dt / 1000)  #clean this up in the future...works for now.
    for t in [checkpoint, final_checkpoint]:
        post.merge_process_files(t.checkpoint_dir, cleanup=False)
    for t in [slices, profiles, scalars]:
        post.merge_process_files(t.base_path, cleanup=False)
    # Final run statistics.
    end_time = time.time()
    logger.info('Iterations: %i' % solver.iteration)
    logger.info('Sim end time: %f' % solver.sim_time)
    logger.info('Run time: %.2f sec' % (end_time - start_time))
    logger.info('Iter/sec: %.2f ' % (solver.iteration /
                                     (end_time - start_time)))
    logger.info(
        'Run time: %f cpu-hr' %
        ((end_time - start_time) / 60 / 60 * domain.dist.comm_cart.size))
Ejemplo n.º 13
0
def FC_polytrope(dynamics_file,
                 Rayleigh=1e4,
                 Prandtl=1,
                 aspect_ratio=4,
                 Taylor=None,
                 theta=0,
                 nz=128,
                 nx=None,
                 ny=None,
                 threeD=False,
                 mesh=None,
                 n_rho_cz=3,
                 epsilon=1e-4,
                 gamma=5 / 3,
                 run_time=23.5,
                 run_time_buoyancies=None,
                 run_time_iter=np.inf,
                 fixed_T=False,
                 fixed_flux=False,
                 mixed_flux_T=False,
                 const_mu=True,
                 const_kappa=True,
                 dynamic_diffusivities=False,
                 split_diffusivities=False,
                 chemistry=True,
                 ChemicalPrandtl=1,
                 Qu_0=5e-8,
                 phi_0=10,
                 restart=None,
                 start_new_files=False,
                 scalar_file=None,
                 rk222=False,
                 safety_factor=0.2,
                 max_writes=20,
                 data_dir='./',
                 out_cadence=0.1,
                 no_coeffs=False,
                 no_join=False,
                 verbose=False):
    """Run a fully-compressible polytrope convection IVP, re-injecting
    dynamics (and optionally scalar) fields from prior HDF5 output.

    Parameters (main groups)
    ------------------------
    dynamics_file : str
        Dedalus HDF5 file whose final write supplies u/w (and v in 3D),
        T and ln_rho to restart the dynamic variables; its last recorded
        timestep seeds `dt`.
    Rayleigh, Prandtl, Taylor, theta, epsilon, gamma, n_rho_cz :
        Control parameters of the atmosphere/problem.
    nz, nx, ny, aspect_ratio, threeD, mesh :
        Resolution and geometry; nx defaults to round(nz * aspect_ratio)
        and ny defaults to nx in 3D.
    scalar_file : str or None
        Optional HDF5 file whose first write supplies the scalars
        f, C, G.  Skipped when None or the string 'None'.
    run_time, run_time_buoyancies, run_time_iter :
        Stop criteria (wall-clock hours, buoyancy times, iterations).
    data_dir : str
        Output directory for checkpoints, analysis tasks and figures.

    Relies on module-level `np`, `h5py`, and `logger`.
    """

    import dedalus.public as de
    from dedalus.tools import post
    from dedalus.extras import flow_tools

    import time
    import os
    import sys
    from stratified_dynamics import polytropes
    from tools.checkpointing import Checkpoint

    checkpoint_min = 30  # wall-clock minutes between checkpoint writes

    initial_time = time.time()

    logger.info("Starting Dedalus script {:s}".format(sys.argv[0]))

    # Default horizontal resolution from the aspect ratio.
    if nx is None:
        nx = int(np.round(nz * aspect_ratio))
    if threeD and ny is None:
        ny = nx

    eqn_dict = {
        'nx': nx,
        'nz': nz,
        'constant_kappa': const_kappa,
        'constant_mu': const_mu,
        'epsilon': epsilon,
        'gamma': gamma,
        'n_rho_cz': n_rho_cz,
        'aspect_ratio': aspect_ratio,
        'fig_dir': data_dir
    }
    # Choose the atmosphere class by dimensionality / diffusivity / chemistry.
    if threeD:
        eqn_dict['mesh'] = mesh
        eqn_dict['nz'] = nz
        atmosphere = polytropes.FC_polytrope_rxn_3d(**eqn_dict)

    else:
        if dynamic_diffusivities:
            atmosphere = polytropes.FC_polytrope_2d_kappa(**eqn_dict)
        else:
            if chemistry:
                atmosphere = polytropes.FC_polytrope_rxn_2d(**eqn_dict)
            else:
                atmosphere = polytropes.FC_polytrope_2d(**eqn_dict)

    # NCC expansion cutoff tightens with smaller epsilon (steeper profiles).
    if epsilon < 1e-4:
        ncc_cutoff = 1e-14
    elif epsilon > 1e-1:
        ncc_cutoff = 1e-6
    else:
        ncc_cutoff = 1e-10

    problem_dict = {
        'ncc_cutoff': ncc_cutoff,
        'split_diffusivities': split_diffusivities
    }
    if threeD:
        problem_dict['Taylor'] = Taylor
        problem_dict['theta'] = theta
    if chemistry:
        problem_dict['ChemicalPrandtl'] = ChemicalPrandtl
        problem_dict['Qu_0'] = Qu_0
        problem_dict['phi_0'] = phi_0

    atmosphere.set_IVP_problem(Rayleigh, Prandtl, **problem_dict)

    # Thermal boundary conditions (velocity BCs are stress-free throughout).
    if fixed_flux:
        atmosphere.set_BC(fixed_flux=True, stress_free=True)
    elif mixed_flux_T:
        atmosphere.set_BC(mixed_flux_temperature=True, stress_free=True)
    else:
        atmosphere.set_BC(fixed_temperature=True, stress_free=True)

    problem = atmosphere.get_problem()

    # Only rank 0 creates the output directory.
    if atmosphere.domain.distributor.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.mkdir('{:s}/'.format(data_dir))

    # CFL safety scales with the number of RK substeps.
    if rk222:
        logger.info("timestepping using RK222")
        ts = de.timesteppers.RK222
        cfl_safety_factor = safety_factor * 2
    else:
        logger.info("timestepping using RK443")
        ts = de.timesteppers.RK443
        cfl_safety_factor = safety_factor * 4

    # Build solver
    solver = problem.build_solver(ts)

    #Check atmosphere
    logger.info("thermal_time = {:g}, top_thermal_time = {:g}".format(atmosphere.thermal_time,\
                                                                    atmosphere.top_thermal_time))
    logger.info("full atm HS check")
    atmosphere.check_atmosphere(make_plots=False,
                                rho=atmosphere.get_full_rho(solver),
                                T=atmosphere.get_full_T(solver))

    # Append to existing output on restart; otherwise start fresh files.
    if restart is None:
        mode = "overwrite"
    else:
        mode = "append"

    logger.info('checkpointing in {}'.format(data_dir))
    checkpoint = Checkpoint(data_dir)

    if restart is None:
        atmosphere.set_IC(solver)
        dt = None
    else:
        logger.info("restarting from {}".format(restart))
        dt = checkpoint.restart(restart, solver)

    checkpoint.set_checkpoint(solver, wall_dt=checkpoint_min * 60, mode=mode)

    if run_time_buoyancies != None:
        solver.stop_sim_time = solver.sim_time + run_time_buoyancies * atmosphere.buoyancy_time
    else:
        solver.stop_sim_time = 100 * atmosphere.thermal_time

    solver.stop_iteration = solver.iteration + run_time_iter
    solver.stop_wall_time = run_time * 3600
    report_cadence = 1
    output_time_cadence = out_cadence * atmosphere.buoyancy_time
    Hermitian_cadence = 100  # 3D only: enforce Hermitian symmetry periodically

    logger.info("stopping after {:g} time units".format(solver.stop_sim_time))
    logger.info("output cadence = {:g}".format(output_time_cadence))
    analysis_tasks = atmosphere.initialize_output(
        solver,
        data_dir,
        sim_dt=output_time_cadence,
        coeffs_output=not (no_coeffs),
        mode=mode,
        max_writes=max_writes)

    # Reinjecting dynamics and tracers if desired
    logger.info("Re-injecting scalars")

    def reset_variable(key, h5_val, grid=False, grad=False):
        """Load h5_val into solver.state[key]; optionally z-differentiate
        in place (used to rebuild the *_z companion variables)."""
        # Get variable
        k = solver.state[key]
        k.set_scales(1, keep_data=True)

        # Select this process's local slice of the chosen layout.
        # (renamed from `slice`, which shadowed the builtin)
        if grid:
            gc = 'g'
            sl = solver.domain.dist.grid_layout.slices(k.meta[:]['scale'])
        else:
            gc = 'c'
            sl = solver.domain.dist.coeff_layout.slices(k.meta[:]['scale'])

        # Set initial value of variable
        k[gc] = h5_val[sl]

        if grad:  # Set gradient if called for
            k.differentiate('z', out=k)

        logger.info("Re-injecting: {}".format(key))
        return k

    objects = {}
    # Set all the dynamic variables

    if threeD:
        keys = ['u', 'u_z', 'v', 'v_z', 'w', 'w_z', 'T1', 'T1_z', 'ln_rho1']
        # One grad flag per key: *_z entries are rebuilt by differentiation.
        # (the original lists carried extra trailing entries; trimmed to match)
        grads = [False, True, False, True, False, True, False, True, False]
        h5_keys = ['u', 'u', 'v', 'v', 'w', 'w', 'T', 'T', 'ln_rho']
    else:
        keys = ['u', 'u_z', 'w', 'w_z', 'T1', 'T1_z', 'ln_rho1']
        grads = [False, True, False, True, False, True, False]
        h5_keys = ['u', 'u', 'w', 'w', 'T', 'T', 'ln_rho']
    # Read the final write of the dynamics file; close the file promptly.
    with h5py.File(dynamics_file, 'r') as h5File_c:
        # Override dt with the last recorded timestep of the prior run.
        dt = h5File_c['scales']['timestep'][-1]
        dyn_tasks = h5File_c['tasks']
        for i, K in enumerate(keys):
            # Restarting with final profile of run
            objects[K] = reset_variable(K, dyn_tasks[h5_keys[i]][-1], grad=grads[i])

    # Skip when no scalar file is given, either as None or the string 'None'
    # (the original `scalar_file != 'None'` test crashed on the default None).
    if scalar_file not in (None, 'None'):
        keys = ['f', 'f_z', 'C', 'C_z', 'G', 'G_z']
        # Fixed: the original 4-entry list raised IndexError for 'G'/'G_z'.
        grads = [False, True, False, True, False, True]
        h5_keys = ['f', 'f', 'C', 'C', 'G', 'G']
        with h5py.File(scalar_file, 'r') as h5File_g:
            scalar_tasks = h5File_g['tasks']
            for i, K in enumerate(keys):
                # Restarting with initial profile
                objects[K] = reset_variable(K,
                                            scalar_tasks[h5_keys[i]][0],
                                            grid=True,
                                            grad=grads[i])

    #Set up timestep defaults
    max_dt = output_time_cadence / 2
    if dt is None: dt = max_dt

    cfl_cadence = 1
    cfl_threshold = 0.1
    CFL = flow_tools.CFL(solver,
                         initial_dt=dt,
                         cadence=cfl_cadence,
                         safety=cfl_safety_factor,
                         max_change=1.5,
                         min_change=0.5,
                         max_dt=max_dt,
                         threshold=cfl_threshold)
    if threeD:
        CFL.add_velocities(('u', 'v', 'w'))
    else:
        CFL.add_velocities(('u', 'w'))

    # Flow properties
    flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
    flow.add_property("Re_rms", name='Re')
    if verbose:
        flow.add_property("Pe_rms", name='Pe')
        flow.add_property("Nusselt_AB17", name='Nusselt')

    start_iter = solver.iteration
    start_sim_time = solver.sim_time

    try:
        start_time = time.time()
        start_iter = solver.iteration
        logger.info('starting main loop')
        good_solution = True
        first_step = True
        while solver.ok and good_solution:
            dt = CFL.compute_dt()
            # advance
            solver.step(dt)

            effective_iter = solver.iteration - start_iter

            # Periodically project back to grid space to restore Hermitian
            # symmetry in 3D runs.
            if threeD and effective_iter % Hermitian_cadence == 0:
                for field in solver.state.fields:
                    field.require_grid_space()

            # update lists
            if effective_iter % report_cadence == 0:
                Re_avg = flow.grid_average('Re')
                log_string = 'Iteration: {:5d}, Time: {:8.3e} ({:8.3e}), dt: {:8.3e}, '.format(
                    solver.iteration - start_iter, solver.sim_time,
                    (solver.sim_time - start_sim_time) /
                    atmosphere.buoyancy_time, dt)
                if verbose:
                    log_string += '\n\t\tRe: {:8.5e}/{:8.5e}'.format(
                        Re_avg, flow.max('Re'))
                    log_string += '; Pe: {:8.5e}/{:8.5e}'.format(
                        flow.grid_average('Pe'), flow.max('Pe'))
                    log_string += '; Nu: {:8.5e}/{:8.5e}'.format(
                        flow.grid_average('Nusselt'), flow.max('Nusselt'))
                else:
                    log_string += 'Re: {:8.3e}/{:8.3e}'.format(
                        Re_avg, flow.max('Re'))
                logger.info(log_string)

            # NOTE: relies on report_cadence == 1 so Re_avg is always bound.
            if not np.isfinite(Re_avg):
                good_solution = False
                logger.info(
                    "Terminating run.  Trapped on Reynolds = {}".format(
                        Re_avg))

            if first_step:
                # On the first step optionally dump sparsity diagnostics of
                # the LHS matrix and its LU factors, then reset the timer so
                # this one-off cost is excluded from the loop statistics.
                if verbose:
                    import matplotlib
                    matplotlib.use('Agg')
                    import matplotlib.pyplot as plt
                    fig = plt.figure()
                    ax = fig.add_subplot(1, 1, 1)
                    ax.spy(solver.pencils[0].L,
                           markersize=1,
                           markeredgewidth=0.0)
                    fig.savefig(data_dir + "sparsity_pattern.png", dpi=1200)

                    import scipy.sparse.linalg as sla
                    LU = sla.splu(solver.pencils[0].LHS.tocsc(),
                                  permc_spec='NATURAL')
                    fig = plt.figure()
                    ax = fig.add_subplot(1, 2, 1)
                    ax.spy(LU.L.A, markersize=1, markeredgewidth=0.0)
                    ax = fig.add_subplot(1, 2, 2)
                    ax.spy(LU.U.A, markersize=1, markeredgewidth=0.0)
                    fig.savefig(data_dir + "sparsity_pattern_LU.png", dpi=1200)

                    logger.info("{} nonzero entries in LU".format(LU.nnz))
                    logger.info("{} nonzero entries in LHS".format(
                        solver.pencils[0].LHS.tocsc().nnz))
                    logger.info("{} fill in factor".format(
                        LU.nnz / solver.pencils[0].LHS.tocsc().nnz))
                first_step = False
                start_time = time.time()
    except:
        # Log, then re-raise; the `finally` block still joins output files.
        logger.error('Exception raised, triggering end of main loop.')
        raise
    finally:
        end_time = time.time()

        # Print statistics
        elapsed_time = end_time - start_time
        elapsed_sim_time = solver.sim_time
        N_iterations = solver.iteration - 1
        logger.info('main loop time: {:e}'.format(elapsed_time))
        logger.info('Iterations: {:d}'.format(N_iterations))
        logger.info('iter/sec: {:g}'.format(N_iterations / (elapsed_time)))
        if N_iterations > 0:
            logger.info('Average timestep: {:e}'.format(elapsed_sim_time /
                                                        N_iterations))

        if not no_join:
            logger.info('beginning join operation')
            try:
                final_checkpoint = Checkpoint(
                    data_dir, checkpoint_name='final_checkpoint')
                final_checkpoint.set_checkpoint(solver,
                                                wall_dt=1,
                                                mode="append")
                solver.step(dt)  #clean this up in the future...works for now.
                post.merge_process_files(data_dir + '/final_checkpoint/',
                                         cleanup=True)
            except:
                print('cannot save final checkpoint')

            logger.info(data_dir + '/checkpoint/')
            post.merge_process_files(data_dir + '/checkpoint/', cleanup=True)

            for task in analysis_tasks.keys():
                logger.info(analysis_tasks[task].base_path)
                post.merge_process_files(analysis_tasks[task].base_path,
                                         cleanup=True)

        if (atmosphere.domain.distributor.rank == 0):

            logger.info('main loop time: {:e}'.format(elapsed_time))
            if start_iter > 1:
                logger.info('Iterations (this run): {:d}'.format(N_iterations -
                                                                 start_iter))
                # Total includes iterations from earlier restarts.
                # (the original logged the same value for both lines)
                logger.info('Iterations (total): {:d}'.format(N_iterations))
            logger.info('iter/sec: {:g}'.format(N_iterations / (elapsed_time)))
            if N_iterations > 0:
                logger.info('Average timestep: {:e}'.format(elapsed_sim_time /
                                                            N_iterations))

            N_TOTAL_CPU = atmosphere.domain.distributor.comm_cart.size

            # Print statistics
            print('-' * 40)
            total_time = end_time - initial_time
            main_loop_time = end_time - start_time
            startup_time = start_time - initial_time
            n_steps = solver.iteration - 1
            print('  startup time:', startup_time)
            print('main loop time:', main_loop_time)
            print('    total time:', total_time)
            if n_steps > 0:
                print('    iterations:', n_steps)
                print(' loop sec/iter:', main_loop_time / n_steps)
                print('    average dt:', solver.sim_time / n_steps)
                print(
                    "          N_cores, Nx, Nz, startup     main loop,   main loop/iter, main loop/iter/grid, n_cores*main loop/iter/grid"
                )
                print(
                    'scaling:', ' {:d} {:d} {:d}'.format(N_TOTAL_CPU, nx, nz),
                    ' {:8.3g} {:8.3g} {:8.3g} {:8.3g} {:8.3g}'.format(
                        startup_time, main_loop_time, main_loop_time / n_steps,
                        main_loop_time / n_steps / (nx * nz),
                        N_TOTAL_CPU * main_loop_time / n_steps / (nx * nz)))
            print('-' * 40)
Ejemplo n.º 14
0
# Broadcast the 2D first cumulants (cs, ct) into the 3D CE2 output fields,
# normalizing by Lx; "+ 0.*y1" broadcasts along the inserted axis.
# NOTE(review): assumes output_vars fields are 3D with y as the middle axis —
# confirm against the CE2 domain construction.
output_vars['cs']['g'] = cs['g'][:,np.newaxis,:]/Lx + 0.*y1
output_vars['ct']['g'] = ct['g'][:,np.newaxis,:]/Lx + 0.*y1
# Quick visual sanity check: original profile vs. its broadcast copy.
plt.plot(ct['g'][0,:])
plt.plot(output_vars['ct']['g'][0,0,:],'kx')
plt.savefig("output.png",dpi=400)

# calculate second cumulants
# Pair fields for each cumulant task; out_fields[2:] names the targets.
ingredients = ((psi, psi),(theta, theta), (theta, psi), (psi, theta))
inputs = dict(zip(out_fields[2:],ingredients))

start_time=time.time()
for k,v in inputs.items():
    # Reset both factor fields to unit scales before the spectral computation.
    v[0].set_scales(1, keep_data=True)
    v[1].set_scales(1, keep_data=True)
    logger.info("calculating {}".format(k))
    all_second_cumulants_spectral(output_vars[k], v[0], g=v[1],layout='xy')

end_time = time.time()
# create FileHandler to output data
field_system = FieldSystem(output_vars.values())
# Rescale from the DNS resolution to the requested CE2 output resolution.
x_scale_factor = output_Nx/Nx
y_scale_factor = output_Ny/Ny
output_evaluator = Evaluator(field_system.domain, out_fields)
output_handler = output_evaluator.add_file_handler(ce2_file_path)
output_handler.add_system(field_system, scales=(x_scale_factor, y_scale_factor, y_scale_factor))

# Single evaluation at t=0 writes the assembled CE2 initial condition.
output_evaluator.evaluate_handlers(output_evaluator.handlers, timestep=0,sim_time=0, world_time=0, wall_time=0, iteration=0)

merge_process_files(ce2_file_path, cleanup=True)
logger.info("DNS to CE2 complete in {:f} sec".format(end_time-start_time))
Ejemplo n.º 15
0
import logging
logger = logging.getLogger(__name__)

import dedalus.public
from dedalus.tools import post

from docopt import docopt

# Command-line driven merge of per-process Dedalus output files.
args = docopt(__doc__)

data_dir = args['<case>'][0]
base_path = os.path.abspath(data_dir) + '/'

cleanup = args['--cleanup']

logger.info("joining data from Dedalus run {:s}".format(data_dir))

# Merge either the single requested data type or all standard output types.
if args['--data_type'] is not None:
    data_types = [args['--data_type']]
else:
    data_types = ['scalar', 'profiles', 'slices', 'coeffs', 'volumes', 'checkpoint']

for data_type in data_types:
    logger.info("merging {}".format(data_type))
    try:
        post.merge_process_files(base_path + data_type, cleanup=cleanup)
    except Exception:
        # Best-effort: a run may not have produced every output type.
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate.
        logger.info("missing {}".format(data_type))

logger.info("done join operation for {:s}".format(data_dir))
Ejemplo n.º 16
0
def FC_convection(Rayleigh=1e6,
                  Prandtl=1,
                  stiffness=3,
                  m_rz=3,
                  gamma=5 / 3,
                  MHD=False,
                  MagneticPrandtl=1,
                  B0_amplitude=1,
                  n_rho_cz=1,
                  n_rho_rz=5,
                  nz_cz=128,
                  nz_rz=128,
                  nx=None,
                  width=None,
                  single_chebyshev=False,
                  rk222=False,
                  superstep=False,
                  dense=False,
                  nz_dense=64,
                  oz=False,
                  fixed_flux=False,
                  run_time=23.5,
                  run_time_buoyancies=np.inf,
                  run_time_iter=np.inf,
                  dynamic_diffusivities=False,
                  max_writes=20,
                  out_cadence=0.1,
                  no_coeffs=False,
                  no_join=False,
                  restart=None,
                  data_dir='./',
                  verbose=False,
                  label=None):
    """Run a multitrope (CZ over/under RZ) fully-compressible convection
    IVP, optionally with a guide-field MHD formulation.

    Output (checkpoints, analysis tasks, logs) is written under a directory
    derived from data_dir, the script name, and the run parameters.
    Relies on module-level `np`, `os`, `sys`, `time`, and `logging`.
    """
    def format_number(number, no_format_min=0.1, no_format_max=10):
        """Compactly format a number for use in directory names
        (strip trailing zeros; keep exponent only outside [min, max))."""
        if number > no_format_max or number < no_format_min:
            try:
                mantissa = "{:e}".format(number).split("+")[0].split(
                    "e")[0].rstrip("0") or "0"
                power = "{:e}".format(number).split("+")[1].lstrip("0") or "0"
            except IndexError:
                # No "+" in the exponent string: the exponent is negative.
                # (narrowed from a bare except)
                mantissa = "{:e}".format(number).split("-")[0].split(
                    "e")[0].rstrip("0") or "0"
                power = "{:e}".format(number).split("-")[1].lstrip("0") or "0"
                power = "-" + power
            if mantissa[-1] == ".":
                mantissa = mantissa[:-1]
            mantissa += "e"
        else:
            mantissa = "{:f}".format(number).rstrip("0") or "0"
            if mantissa[-1] == ".":
                mantissa = mantissa[:-1]
            power = ""
        number_string = mantissa + power
        return number_string

    # save data in directory named after script, rooted at the caller's
    # data_dir (the original unconditionally reset data_dir to './',
    # silently ignoring the argument)
    if data_dir[-1] != '/':
        data_dir += '/'
    data_dir += sys.argv[0].split('.py')[0]
    data_dir += "_nrhocz{}_Ra{}_S{}".format(format_number(n_rho_cz),
                                            format_number(Rayleigh),
                                            format_number(stiffness))
    if width:
        data_dir += "_erf{}".format(format_number(width))
    # Use the MHD parameter; the original read the global `args['--MHD']`,
    # which raises NameError when this is called as a plain function.
    if MHD:
        data_dir += '_MHD'
    if label:
        data_dir += "_{}".format(label)
    data_dir += '/'

    from dedalus.tools.config import config

    # Route Dedalus file logging into the run directory.
    config['logging']['filename'] = os.path.join(data_dir, 'logs/dedalus_log')
    config['logging']['file_level'] = 'DEBUG'

    import mpi4py.MPI
    if mpi4py.MPI.COMM_WORLD.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.makedirs('{:s}/'.format(data_dir))
        logdir = os.path.join(data_dir, 'logs')
        if not os.path.exists(logdir):
            os.mkdir(logdir)
    logger = logging.getLogger(__name__)
    logger.info("saving run in: {}".format(data_dir))

    import dedalus.public as de
    from dedalus.tools import post
    from dedalus.extras import flow_tools

    from dedalus.core.future import FutureField
    from stratified_dynamics import multitropes
    from tools.checkpointing import Checkpoint

    checkpoint_min = 30  # wall-clock minutes between checkpoint writes

    initial_time = time.time()

    logger.info("Starting Dedalus script {:s}".format(sys.argv[0]))

    constant_Prandtl = True
    stable_top = True
    mixed_temperature_flux = True

    # Set domain
    if nx is None:
        nx = nz_cz * 4

    if single_chebyshev:
        nz = nz_cz
        nz_list = [nz_cz]
    else:
        nz = nz_rz + nz_cz
        nz_list = [nz_rz, nz_cz]

    eqns_dict = {
        'stiffness': stiffness,
        'nx': nx,
        'nz': nz_list,
        'n_rho_cz': n_rho_cz,
        'n_rho_rz': n_rho_rz,
        'verbose': verbose,
        'width': width,
        'constant_Prandtl': constant_Prandtl,
        'stable_top': stable_top,
        'gamma': gamma,
        'm_rz': m_rz
    }
    if MHD:
        atmosphere = multitropes.FC_MHD_multitrope_guidefield_2d(**eqns_dict)

        atmosphere.set_IVP_problem(Rayleigh,
                                   Prandtl,
                                   MagneticPrandtl,
                                   guidefield_amplitude=B0_amplitude)
    else:
        atmosphere = multitropes.FC_multitrope(**eqns_dict)
        atmosphere.set_IVP_problem(Rayleigh, Prandtl)

    atmosphere.set_BC()
    problem = atmosphere.get_problem()

    if atmosphere.domain.distributor.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.mkdir('{:s}/'.format(data_dir))

    # CFL safety scales with the number of RK substeps.
    if rk222:
        logger.info("timestepping using RK222")
        ts = de.timesteppers.RK222
        cfl_safety_factor = 0.2 * 2
    else:
        logger.info("timestepping using RK443")
        ts = de.timesteppers.RK443
        cfl_safety_factor = 0.2 * 4

    # Build solver
    solver = problem.build_solver(ts)

    # Append to existing output on restart; otherwise start fresh files.
    if restart is None:
        mode = "overwrite"
    else:
        mode = "append"

    checkpoint = Checkpoint(data_dir)

    # initial conditions
    if restart is None:
        atmosphere.set_IC(solver)
        dt = None
    else:
        logger.info("restarting from {}".format(restart))
        dt = checkpoint.restart(restart, solver)

    checkpoint.set_checkpoint(solver, wall_dt=checkpoint_min * 60, mode=mode)

    logger.info("thermal_time = {:g}, top_thermal_time = {:g}".format(
        atmosphere.thermal_time, atmosphere.top_thermal_time))

    # Second assignment deliberately overrides the BV estimate.
    max_dt = atmosphere.min_BV_time
    max_dt = atmosphere.buoyancy_time * out_cadence
    if dt is None: dt = max_dt

    report_cadence = 1
    output_time_cadence = out_cadence * atmosphere.buoyancy_time
    solver.stop_sim_time = solver.sim_time + run_time_buoyancies * atmosphere.buoyancy_time
    solver.stop_iteration = solver.iteration + run_time_iter
    solver.stop_wall_time = run_time * 3600

    logger.info("output cadence = {:g}".format(output_time_cadence))

    analysis_tasks = atmosphere.initialize_output(
        solver,
        data_dir,
        coeffs_output=not (no_coeffs),
        sim_dt=output_time_cadence,
        max_writes=max_writes,
        mode=mode)

    cfl_cadence = 1
    CFL = flow_tools.CFL(solver,
                         initial_dt=dt,
                         cadence=cfl_cadence,
                         safety=cfl_safety_factor,
                         max_change=1.5,
                         min_change=0.5,
                         max_dt=max_dt,
                         threshold=0.1)

    CFL.add_velocities(('u', 'w'))
    if MHD:
        # Also limit dt by the Alfven speed.
        CFL.add_velocities(
            ('Bx/sqrt(4*pi*rho_full)', 'Bz/sqrt(4*pi*rho_full)'))

    # Flow properties
    flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
    flow.add_property("Re_rms", name='Re')
    if MHD:
        flow.add_property("abs(dx(Bx) + dz(Bz))", name='divB')

    try:
        start_time = time.time()
        while solver.ok:

            dt = CFL.compute_dt()
            # advance
            solver.step(dt)

            # update lists
            if solver.iteration % report_cadence == 0:
                Re_avg = flow.grid_average('Re')
                # Stop cleanly if the solution has gone non-finite.
                if not np.isfinite(Re_avg):
                    solver.ok = False
                log_string = 'Iteration: {:5d}, Time: {:8.3e} ({:8.3e}), dt: {:8.3e}, '.format(
                    solver.iteration, solver.sim_time,
                    solver.sim_time / atmosphere.buoyancy_time, dt)
                log_string += 'Re: {:8.3e}/{:8.3e}'.format(
                    Re_avg, flow.max('Re'))
                if MHD:
                    log_string += ', divB: {:8.3e}/{:8.3e}'.format(
                        flow.grid_average('divB'), flow.max('divB'))

                logger.info(log_string)
    except:
        # Log, then re-raise; the `finally` block still joins output files.
        logger.error('Exception raised, triggering end of main loop.')
        raise
    finally:
        end_time = time.time()

        # Print statistics
        elapsed_time = end_time - start_time
        elapsed_sim_time = solver.sim_time
        N_iterations = solver.iteration
        logger.info('main loop time: {:e}'.format(elapsed_time))
        logger.info('Iterations: {:d}'.format(N_iterations))
        logger.info('iter/sec: {:g}'.format(N_iterations / (elapsed_time)))
        if N_iterations > 0:  # guard zero-iteration runs (matches FC_polytrope)
            logger.info('Average timestep: {:e}'.format(elapsed_sim_time /
                                                        N_iterations))

        if not no_join:
            logger.info('beginning join operation')
            logger.info(data_dir + '/checkpoint/')
            post.merge_process_files(data_dir + '/checkpoint/', cleanup=False)

            for task in analysis_tasks:
                logger.info(analysis_tasks[task].base_path)
                post.merge_process_files(analysis_tasks[task].base_path,
                                         cleanup=False)

        if (atmosphere.domain.distributor.rank == 0):

            logger.info('main loop time: {:e}'.format(elapsed_time))
            logger.info('Iterations: {:d}'.format(N_iterations))
            logger.info('iter/sec: {:g}'.format(N_iterations / (elapsed_time)))
            if N_iterations > 0:
                logger.info('Average timestep: {:e}'.format(elapsed_sim_time /
                                                            N_iterations))

            N_TOTAL_CPU = atmosphere.domain.distributor.comm_cart.size

            # Print statistics
            print('-' * 40)
            total_time = end_time - initial_time
            main_loop_time = end_time - start_time
            startup_time = start_time - initial_time
            n_steps = solver.iteration - 1
            print('  startup time:', startup_time)
            print('main loop time:', main_loop_time)
            print('    total time:', total_time)
            print('Iterations:', solver.iteration)
            if n_steps > 0:  # guard division for runs that never stepped
                print('Average timestep:', solver.sim_time / n_steps)
                print(
                    "          N_cores, Nx, Nz, startup     main loop,   main loop/iter, main loop/iter/grid, n_cores*main loop/iter/grid"
                )
                print(
                    'scaling:', ' {:d} {:d} {:d}'.format(N_TOTAL_CPU, nx, nz),
                    ' {:8.3g} {:8.3g} {:8.3g} {:8.3g} {:8.3g}'.format(
                        startup_time, main_loop_time, main_loop_time / n_steps,
                        main_loop_time / n_steps / (nx * nz),
                        N_TOTAL_CPU * main_loop_time / n_steps / (nx * nz)))
            print('-' * 40)
Ejemplo n.º 17
0
# Flow properties
# Track the domain-averaged squared residual of the (pressure-like) equation;
# used as the convergence criterion for this relaxation loop.
flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
flow.add_property("integ((dx(ln_K)*px + dz(ln_K)*pz + Lp0 + Lp1)**2)/Lx/Lz",
                  name="residual")

# Main loop
# Step until the solver stops or the residual drops below `tolerance`.
try:
    logger.info('Starting loop')
    start_time = time.time()
    residual = np.inf
    while solver.ok and residual > tolerance:
        dt = solver.step(dt)
        residual = flow.max("residual")
        if (solver.iteration - 1) % 1 == 0:  # cadence 1: log every iteration
            logger.info('Iteration: %i, Time: %e, dt: %e' %
                        (solver.iteration, solver.sim_time, dt))
            logger.info("Residual  = %e" % residual)
except:
    # Log, then re-raise so the failure is visible; `finally` still runs.
    logger.error('Exception raised, triggering end of main loop.')
    raise
finally:
    # Report timing and merge per-process snapshot files.
    end_time = time.time()
    logger.info('Iterations: %i' % solver.iteration)
    logger.info('Sim end time: %f' % solver.sim_time)
    logger.info('Run time: %.2f sec' % (end_time - start_time))
    logger.info(
        'Run time: %f cpu-hr' %
        ((end_time - start_time) / 60 / 60 * domain.dist.comm_cart.size))
    post.merge_process_files(snapshots.base_path, cleanup=True)
import glob

# Command-line driven merge over every run directory matching the <case> glob.
args = docopt(__doc__)

data_dir = args['<case>'][0]
for d in glob.glob(data_dir):
    base_path = os.path.abspath(d) + '/'

    cleanup = args['--cleanup']

    logger.info("joining data from Dedalus run {:s}".format(d))

    # Merge either the single requested data type or all standard output types.
    if args['--data_type'] is not None:
        data_types = [args['--data_type']]
    else:
        data_types = [
            'slices', 'profiles', 'final_checkpoint', 'checkpoint', 'volumes',
            'scalar'
        ]

    for data_type in data_types:
        logger.info("merging {}".format(data_type))
        try:
            print(base_path + data_type)
            post.merge_process_files('{:s}/{:s}/'.format(base_path, data_type),
                                     cleanup=cleanup)
        except Exception:
            # Best-effort: a run may not have produced every output type.
            # Narrowed from a bare except so Ctrl-C / SystemExit still propagate.
            logger.info("missing {}".format(data_type))

    logger.info("done join operation for {:s}".format(d))
Ejemplo n.º 19
0
if __name__ == "__main__":

    import pathlib
    import shutil
    import tarfile
    from docopt import docopt
    from dedalus.tools import logging
    from dedalus.tools import post
    from dedalus.tests import test, bench, cov

    # Dispatch on whichever docopt command flag is set.
    arguments = docopt(__doc__)
    if arguments['test']:
        test()
    elif arguments['bench']:
        bench()
    elif arguments['cov']:
        cov()
    elif arguments['get_config']:
        # Copy the packaged default config file into the working directory.
        default_cfg = pathlib.Path(__file__).parent.joinpath('dedalus.cfg')
        shutil.copy(str(default_cfg), '.')
    elif arguments['get_examples']:
        # Unpack the bundled example scripts into ./dedalus_examples.
        bundle = pathlib.Path(__file__).parent.joinpath('examples.tar.gz')
        with tarfile.open(str(bundle), mode='r:gz') as archive:
            archive.extractall('dedalus_examples')
    elif arguments['merge_procs']:
        post.merge_process_files(arguments['<base_path>'], cleanup=arguments['--cleanup'])
    elif arguments['merge_sets']:
        post.merge_sets(arguments['<joint_path>'], arguments['<set_paths>'], cleanup=arguments['--cleanup'])
Ejemplo n.º 20
0
def FC_convection(Rayleigh=1e6, Prandtl=1, stiffness=1e4, m_rz=3, gamma=5/3,
                      n_rho_cz=3.5, n_rho_rz=1, 
                      nz_cz=128, nz_rz=128,
                      nx = None,
                      width=None,
                      single_chebyshev=False,
                      rk222=False,
                      superstep=False,
                      dense=False, nz_dense=64,
                      oz=False,
                      fixed_flux=False,
                      run_time=23.5, run_time_buoyancies=np.inf, run_time_iter=np.inf,
                      dynamic_diffusivities=False,
                      max_writes=20,out_cadence=0.1, no_coeffs=False, no_join=False,
                      restart=None, data_dir='./', verbose=False, label=None):
    """Run a fully compressible multitrope convection IVP with Dedalus.

    Builds a multitrope atmosphere (optionally with dynamic diffusivities),
    sets boundary and initial conditions, time-steps the IVP under CFL
    control while logging Reynolds-number diagnostics, writes checkpoints
    and analysis output, and finally joins per-process output files.

    Returns
    -------
    str
        The data directory the run was saved into.
    """

    def format_number(number, no_format_min=0.1, no_format_max=10):
        """Format `number` compactly: exponential notation outside
        [no_format_min, no_format_max], plain decimal inside."""
        if number > no_format_max or number < no_format_min:
            try:
                mantissa = "{:e}".format(number).split("+")[0].split("e")[0].rstrip("0") or "0"
                power    = "{:e}".format(number).split("+")[1].lstrip("0") or "0"
            # A negative exponent has no "+" separator, so the [1] index
            # raises IndexError; re-parse on the "-" separator instead.
            # (Narrowed from a bare `except:`.)
            except IndexError:
                mantissa = "{:e}".format(number).split("-")[0].split("e")[0].rstrip("0") or "0"
                power    = "{:e}".format(number).split("-")[1].lstrip("0") or "0"
                power    = "-"+power
            if mantissa[-1]==".":
                mantissa = mantissa[:-1]
            mantissa += "e"
        else:
            mantissa = "{:f}".format(number).rstrip("0") or "0"
            if mantissa[-1]==".":
                mantissa = mantissa[:-1]
            power = ""
        number_string = mantissa+power
        return number_string

    # save data in directory named after script, with parameter tags appended
    if data_dir[-1] != '/':
        data_dir += '/'
    data_dir += sys.argv[0].split('.py')[0]
    if fixed_flux:
        data_dir += '_flux'
    if dynamic_diffusivities:
        data_dir += '_dynamic'
    if oz:
        data_dir += '_oz'
    data_dir += "_nrhocz{}_Ra{}_S{}".format(format_number(n_rho_cz), format_number(Rayleigh), format_number(stiffness))
    if width:
        data_dir += "_erf{}".format(format_number(width))
    if label:
        data_dir += "_{}".format(label)
    data_dir += '/'

    from dedalus.tools.config import config

    config['logging']['filename'] = os.path.join(data_dir,'logs/dedalus_log')
    config['logging']['file_level'] = 'DEBUG'

    import mpi4py.MPI
    # Only rank 0 creates output directories to avoid a filesystem race.
    if mpi4py.MPI.COMM_WORLD.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.makedirs('{:s}/'.format(data_dir))
        logdir = os.path.join(data_dir,'logs')
        if not os.path.exists(logdir):
            os.mkdir(logdir)
    logger = logging.getLogger(__name__)
    logger.info("saving run in: {}".format(data_dir))

    import dedalus.public as de
    from dedalus.tools  import post
    from dedalus.extras import flow_tools

    from dedalus.core.future import FutureField
    from stratified_dynamics import multitropes
    from tools.checkpointing import Checkpoint

    checkpoint_min = 30  # wall-clock minutes between checkpoints

    initial_time = time.time()

    logger.info("Starting Dedalus script {:s}".format(sys.argv[0]))

    constant_Prandtl=True
    mixed_temperature_flux=None
    if oz:
        stable_top=True
        if not fixed_flux:
            mixed_temperature_flux=True
    else:
        stable_top=False

    # Set domain resolution; default horizontal resolution is 4x vertical CZ.
    if nx is None:
        nx = nz_cz*4

    if single_chebyshev:
        nz = nz_cz
        nz_list = [nz_cz]
    else:
        if dense:
            nz = nz_rz+nz_dense+nz_cz
            #nz_list = [nz_rz, int(nz_dense/2), int(nz_dense/2), nz_cz]
            nz_list = [nz_rz, nz_dense, nz_cz]
        else:
            nz = nz_rz+nz_cz
            nz_list = [nz_rz, nz_cz]

    if dynamic_diffusivities:
        atmosphere = multitropes.FC_multitrope_2d_kappa_mu(nx=nx, nz=nz_list, stiffness=stiffness, m_rz=m_rz, gamma=gamma,
                                         n_rho_cz=n_rho_cz, n_rho_rz=n_rho_rz, 
                                         verbose=verbose, width=width,
                                         constant_Prandtl=constant_Prandtl,
                                         stable_top=stable_top)
    else:
        atmosphere = multitropes.FC_multitrope(nx=nx, nz=nz_list, stiffness=stiffness, m_rz=m_rz, gamma=gamma,
                                         n_rho_cz=n_rho_cz, n_rho_rz=n_rho_rz, 
                                         verbose=verbose, width=width,
                                         constant_Prandtl=constant_Prandtl,
                                         stable_top=stable_top)

    atmosphere.set_IVP_problem(Rayleigh, Prandtl)

    atmosphere.set_BC(mixed_temperature_flux=mixed_temperature_flux, fixed_flux=fixed_flux)
    problem = atmosphere.get_problem()

    if atmosphere.domain.distributor.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.makedirs('{:s}/'.format(data_dir))

    if rk222:
        logger.info("timestepping using RK222")
        ts = de.timesteppers.RK222
        cfl_safety_factor = 0.2*2
    else:
        logger.info("timestepping using RK443")
        ts = de.timesteppers.RK443
        cfl_safety_factor = 0.2*4

    # Build solver
    solver = problem.build_solver(ts)

    # initial conditions: fresh runs overwrite, restarts append
    if restart is None:
        mode = "overwrite"
    else:
        mode = "append"

    logger.info("checkpointing in {}".format(data_dir))
    checkpoint = Checkpoint(data_dir)

    if restart is None:
        atmosphere.set_IC(solver)
        dt = None
    else:
        logger.info("restarting from {}".format(restart))
        dt = checkpoint.restart(restart, solver)

    checkpoint.set_checkpoint(solver, wall_dt=checkpoint_min*60, mode=mode)

    logger.info("thermal_time = {:g}, top_thermal_time = {:g}".format(atmosphere.thermal_time, atmosphere.top_thermal_time))

    # The output cadence sets the maximum timestep.  (An earlier assignment
    # max_dt = atmosphere.min_BV_time was dead code -- immediately
    # overwritten -- and has been removed.)
    max_dt = atmosphere.buoyancy_time*out_cadence
    if dt is None: dt = max_dt/5

    report_cadence = 1
    output_time_cadence = out_cadence*atmosphere.buoyancy_time
    solver.stop_sim_time  = solver.sim_time + run_time_buoyancies*atmosphere.buoyancy_time
    solver.stop_iteration = solver.iteration + run_time_iter
    solver.stop_wall_time = run_time*3600

    logger.info("output cadence = {:g}".format(output_time_cadence))

    analysis_tasks = atmosphere.initialize_output(solver, data_dir, coeffs_output=not(no_coeffs), sim_dt=output_time_cadence, max_writes=max_writes, mode=mode)

    cfl_cadence = 1
    CFL = flow_tools.CFL(solver, initial_dt=dt, cadence=cfl_cadence, safety=cfl_safety_factor,
                         max_change=1.5, min_change=0.5, max_dt=max_dt, threshold=0.1)

    if superstep:
        # Superstepping: build CFL frequencies from per-direction grid
        # spacings, and keep a traditional CFL alongside for comparison logs.
        CFL_traditional = flow_tools.CFL(solver, initial_dt=max_dt, cadence=cfl_cadence, safety=cfl_safety_factor,
                                         max_change=1.5, min_change=0.5, max_dt=max_dt, threshold=0.1)

        CFL_traditional.add_velocities(('u', 'w'))

        vel_u = FutureField.parse('u', CFL.solver.evaluator.vars, CFL.solver.domain)
        delta_x = atmosphere.Lx/nx
        CFL.add_frequency(vel_u/delta_x)
        vel_w = FutureField.parse('w', CFL.solver.evaluator.vars, CFL.solver.domain)
        mean_delta_z_cz = atmosphere.Lz_cz/nz_cz
        CFL.add_frequency(vel_w/mean_delta_z_cz)
    else:
        CFL.add_velocities(('u', 'w'))

    # Flow properties
    flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
    flow.add_property("Re_rms", name='Re')

    try:
        logger.info("starting main loop")
        start_time = time.time()
        start_iter = solver.iteration
        good_solution = True
        first_step = True
        while solver.ok and good_solution:

            dt = CFL.compute_dt()
            # advance
            solver.step(dt)

            effective_iter = solver.iteration - start_iter

            # update lists
            if effective_iter % report_cadence == 0:
                Re_avg = flow.grid_average('Re')
                log_string = 'Iteration: {:5d}, Time: {:8.3e} ({:8.3e}), '.format(solver.iteration, solver.sim_time, solver.sim_time/atmosphere.buoyancy_time)
                log_string += 'dt: {:8.3e}'.format(dt)
                if superstep:
                    dt_traditional = CFL_traditional.compute_dt()
                    log_string += ' (vs {:8.3e})'.format(dt_traditional)
                log_string += ', '
                log_string += 'Re: {:8.3e}/{:8.3e}'.format(Re_avg, flow.max('Re'))
                logger.info(log_string)

            # Non-finite Reynolds number signals a blown-up solution.
            if not np.isfinite(Re_avg):
                good_solution = False
                logger.info("Terminating run.  Trapped on Reynolds = {}".format(Re_avg))

            if first_step:
                if verbose:
                    # One-time diagnostics: LHS sparsity pattern and LU fill-in.
                    import matplotlib
                    matplotlib.use('Agg')
                    import matplotlib.pyplot as plt
                    fig = plt.figure()
                    ax = fig.add_subplot(1,1,1)
                    ax.spy(solver.pencils[0].L, markersize=0.5, markeredgewidth=0.0)
                    fig.savefig(data_dir+"sparsity_pattern.png", dpi=2400)
                    #fig.savefig(data_dir+"sparsity_pattern.svg", format="svg")

                    import scipy.sparse.linalg as sla
                    LU = sla.splu(solver.pencils[0].LHS.tocsc(), permc_spec='NATURAL')
                    fig = plt.figure()
                    ax = fig.add_subplot(1,2,1)
                    ax.spy(LU.L.A, markersize=1, markeredgewidth=0.0)
                    ax = fig.add_subplot(1,2,2)
                    ax.spy(LU.U.A, markersize=1, markeredgewidth=0.0)
                    fig.savefig(data_dir+"sparsity_pattern_LU.png", dpi=1200)
                    #fig.savefig(data_dir+"sparsity_pattern_LU.svg", format="svg")

                    logger.info("{} nonzero entries in LU".format(LU.nnz))
                    logger.info("{} nonzero entries in LHS".format(solver.pencils[0].LHS.tocsc().nnz))
                    logger.info("{} fill in factor".format(LU.nnz/solver.pencils[0].LHS.tocsc().nnz))
                first_step = False
                # Restart the clock so startup cost is excluded from loop timing.
                start_time = time.time()

    except:
        # Bare except is intentional here: everything (including
        # KeyboardInterrupt) is logged and immediately re-raised.
        logger.error('Exception raised, triggering end of main loop.')
        raise
    finally:
        end_time = time.time()

        # Print statistics
        elapsed_time = end_time - start_time
        elapsed_sim_time = solver.sim_time
        N_iterations = solver.iteration - 1
        logger.info('main loop time: {:e}'.format(elapsed_time))
        logger.info('Iterations: {:d}'.format(N_iterations))
        logger.info('iter/sec: {:g}'.format(N_iterations/(elapsed_time)))
        if N_iterations > 0:
            logger.info('Average timestep: {:e}'.format(elapsed_sim_time / N_iterations))

        logger.info('beginning join operation')
        try:
            final_checkpoint = Checkpoint(data_dir, checkpoint_name='final_checkpoint')
            final_checkpoint.set_checkpoint(solver, wall_dt=1, mode="append")
            solver.step(dt) #clean this up in the future...works for now.
            post.merge_process_files(data_dir+'/final_checkpoint/')
        # Best-effort: a failed final checkpoint should not mask the run
        # result.  Narrowed from a bare `except:` so interrupts propagate.
        except Exception:
            print('cannot save final checkpoint')

        if not(no_join):
            logger.info(data_dir+'/checkpoint/')
            post.merge_process_files(data_dir+'/checkpoint/')

            for task in analysis_tasks:
                logger.info(analysis_tasks[task].base_path)
                post.merge_process_files(analysis_tasks[task].base_path)

        if (atmosphere.domain.distributor.rank==0):

            N_TOTAL_CPU = atmosphere.domain.distributor.comm_cart.size

            # Print statistics
            print('-' * 40)
            total_time = end_time-initial_time
            main_loop_time = end_time - start_time
            startup_time = start_time-initial_time
            n_steps = solver.iteration-1
            print('  startup time:', startup_time)
            print('main loop time:', main_loop_time)
            print('    total time:', total_time)
            if n_steps > 0:
                print('    iterations:', n_steps)
                print(' loop sec/iter:', main_loop_time/n_steps)
                print('    average dt:', solver.sim_time/n_steps)
                print("          N_cores, Nx, Nz, startup     main loop,   main loop/iter, main loop/iter/grid, n_cores*main loop/iter/grid")
                print('scaling:',
                    ' {:d} {:d} {:d}'.format(N_TOTAL_CPU,nx,nz),
                    ' {:8.3g} {:8.3g} {:8.3g} {:8.3g} {:8.3g}'.format(startup_time,
                                                                    main_loop_time, 
                                                                    main_loop_time/n_steps, 
                                                                    main_loop_time/n_steps/(nx*nz), 
                                                                    N_TOTAL_CPU*main_loop_time/n_steps/(nx*nz)))
            print('-' * 40)
    return data_dir
Ejemplo n.º 21
0
import numpy as np
import h5py
from dedalus import public as de
from dedalus.extras import flow_tools
import time
from IPython import display
import sys

from dedalus.tools import post
# Merge per-process Dedalus output files for the run directory given on the
# command line.  NOTE: the directory name and the files inside it must share
# a common base name -- renaming the top-level directory breaks the merge.
target_dir = sys.argv[1]
print('merging files from')
print(target_dir)
post.merge_process_files(target_dir, cleanup=True)
Ejemplo n.º 22
0
def Rayleigh_Benard(Rayleigh=1e6,
                    Prandtl=1,
                    nz=64,
                    nx=None,
                    ny=None,
                    aspect=4,
                    fixed_flux=False,
                    fixed_T=False,
                    mixed_flux_T=True,
                    stress_free=False,
                    no_slip=True,
                    restart=None,
                    run_time=23.5,
                    run_time_buoyancy=None,
                    run_time_iter=np.inf,
                    run_time_therm=1,
                    max_writes=20,
                    max_slice_writes=20,
                    output_dt=0.2,
                    data_dir='./',
                    coeff_output=True,
                    verbose=False,
                    no_join=False,
                    do_bvp=False,
                    num_bvps=10,
                    bvp_convergence_factor=1e-3,
                    bvp_equil_time=10,
                    bvp_resolution_factor=1,
                    bvp_transient_time=30,
                    bvp_final_equil_time=None,
                    min_bvp_time=50,
                    first_bvp_time=20,
                    first_bvp_convergence_factor=1e-2,
                    threeD=False,
                    seed=42,
                    mesh=None,
                    overwrite=False):
    """Run a 2D or 3D Boussinesq Rayleigh-Benard convection IVP with Dedalus.

    Builds the Boussinesq equations with the selected thermal/velocity
    boundary conditions, optionally accelerates equilibration with a BVP
    solver, time-steps under CFL control while logging Reynolds-number
    diagnostics, and finally writes checkpoints and joins output files.
    """
    import os
    from dedalus.tools.config import config

    config['logging']['filename'] = os.path.join(data_dir, 'logs/dedalus_log')
    config['logging']['file_level'] = 'DEBUG'

    import mpi4py.MPI
    # Only rank 0 creates output directories to avoid a filesystem race.
    if mpi4py.MPI.COMM_WORLD.rank == 0:
        if not os.path.exists('{:s}/'.format(data_dir)):
            os.makedirs('{:s}/'.format(data_dir))
        logdir = os.path.join(data_dir, 'logs')
        if not os.path.exists(logdir):
            os.mkdir(logdir)
    logger = logging.getLogger(__name__)
    logger.info("saving run in: {}".format(data_dir))

    import time
    from dedalus import public as de
    from dedalus.extras import flow_tools
    from dedalus.tools import post

    # input parameters
    logger.info("Ra = {}, Pr = {}".format(Rayleigh, Prandtl))

    # Parameters: unit-depth box with horizontal extents set by the aspect ratio.
    Lz = 1.
    Lx = aspect * Lz
    Ly = aspect * Lz
    if nx is None:
        nx = int(nz * aspect)
    if ny is None:
        ny = int(nz * aspect)

    if threeD:
        logger.info("resolution: [{}x{}x{}]".format(nx, ny, nz))
        equations = BoussinesqEquations3D(nx=nx,
                                          ny=ny,
                                          nz=nz,
                                          Lx=Lx,
                                          Ly=Ly,
                                          Lz=Lz,
                                          mesh=mesh)
    else:
        logger.info("resolution: [{}x{}]".format(nx, nz))
        equations = BoussinesqEquations2D(nx=nx, nz=nz, Lx=Lx, Lz=Lz)
    equations.set_IVP(Rayleigh, Prandtl)

    # Select exactly one thermal BC and one velocity BC.
    bc_dict = {
        'fixed_flux': None,
        'fixed_temperature': None,
        'mixed_flux_temperature': None,
        'mixed_temperature_flux': None,
        'stress_free': None,
        'no_slip': None
    }
    if mixed_flux_T:
        bc_dict['mixed_flux_temperature'] = True
    elif fixed_T:
        bc_dict['fixed_temperature'] = True
    elif fixed_flux:
        bc_dict['fixed_flux'] = True

    if stress_free:
        bc_dict['stress_free'] = True
    elif no_slip:
        bc_dict['no_slip'] = True

    supercrit = Rayleigh / RA_CRIT

    equations.set_BC(**bc_dict)

    # Build solver
    ts = de.timesteppers.RK443
    cfl_safety = 0.8

    solver = equations.problem.build_solver(ts)
    logger.info('Solver built')

    checkpoint = Checkpoint(data_dir)
    if isinstance(restart, type(None)):
        equations.set_IC(solver, seed=seed)
        dt = None
        mode = 'overwrite'
    else:
        logger.info("restarting from {}".format(restart))
        checkpoint.restart(restart, solver)
        if overwrite:
            mode = 'overwrite'
        else:
            mode = 'append'
    checkpoint.set_checkpoint(solver, wall_dt=checkpoint_min * 60, mode=mode)

    # Integration parameters
    #    if not isinstance(run_time_therm, type(None)):
    #        solver.stop_sim_time = run_time_therm*equations.thermal_time + solver.sim_time
    #    elif not isinstance(run_time_buoyancy, type(None)):
    #        solver.stop_sim_time  = run_time_buoyancy + solver.sim_time
    #    else:
    solver.stop_sim_time = np.inf
    solver.stop_wall_time = run_time * 3600.
    solver.stop_iteration = run_time_iter
    Hermitian_cadence = 100

    # Analysis
    max_dt = output_dt
    analysis_tasks = equations.initialize_output(solver,
                                                 data_dir,
                                                 coeff_output=coeff_output,
                                                 output_dt=output_dt,
                                                 mode=mode)

    # CFL
    CFL = flow_tools.CFL(solver,
                         initial_dt=0.1,
                         cadence=1,
                         safety=cfl_safety,
                         max_change=1.5,
                         min_change=0.5,
                         max_dt=max_dt,
                         threshold=0.1)
    if threeD:
        CFL.add_velocities(('u', 'v', 'w'))
    else:
        CFL.add_velocities(('u', 'w'))

    # Flow properties
    flow = flow_tools.GlobalFlowProperty(solver, cadence=1)
    flow.add_property("Re", name='Re')
    #    flow.add_property("interp(w, z=0.95)", name='w near top')

    #    u, v, w = solver.state['u'], solver.state['v'], solver.state['w']

    if do_bvp:
        if not threeD:
            ny = 0
            atmo_class = BoussinesqEquations2D
        else:
            atmo_class = BoussinesqEquations3D
        bvp_solver = BoussinesqBVPSolver(atmo_class, nx, ny, nz, \
                                   flow, equations.domain.dist.comm_cart, \
                                   solver, num_bvps, bvp_equil_time, \
                                   threeD=threeD,
                                   bvp_transient_time=bvp_transient_time, \
                                   bvp_run_threshold=bvp_convergence_factor, \
                                   bvp_l2_check_time=1, mesh=mesh,\
                                   first_bvp_time=first_bvp_time,
                                   first_run_threshold=first_bvp_convergence_factor,\
                                   plot_dir='{}/bvp_plots/'.format(data_dir),\
                                   min_avg_dt=1e-10, final_equil_time=bvp_final_equil_time,
                                   min_bvp_time=min_bvp_time)
        bc_dict.pop('stress_free')
        bc_dict.pop('no_slip')

#   print(equations.domain.grid(0), equations.domain.grid(1), equations.domain.grid(2))

    first_step = True
    # Fallback so the `finally` block never hits a NameError if the very
    # first solver.step() raises (start_time is otherwise only assigned at
    # the end of the first iteration, to exclude startup cost).
    start_time = time.time()
    # Main loop
    try:
        logger.info('Starting loop')
        Re_avg = 0
        continue_bvps = True
        not_corrected_times = True
        init_time = solver.sim_time
        start_iter = solver.iteration
        while (solver.ok and np.isfinite(Re_avg)) and continue_bvps:
            dt = CFL.compute_dt()
            solver.step(dt)  #, trim=True)

            # Solve for blow-up over long timescales in 3D due to hermitian-ness
            effective_iter = solver.iteration - start_iter
            if threeD and effective_iter % Hermitian_cadence == 0:
                for field in solver.state.fields:
                    field.require_grid_space()

            Re_avg = flow.grid_average('Re')
            log_string = 'Iteration: {:5d}, '.format(solver.iteration)
            log_string += 'Time: {:8.3e} ({:8.3e} therm), dt: {:8.3e}, '.format(
                solver.sim_time, solver.sim_time / equations.thermal_time, dt)
            log_string += 'Re: {:8.3e}/{:8.3e}'.format(Re_avg, flow.max('Re'))
            logger.info(log_string)

            # Once convection sets in (Re > 1), fix the simulation stop time.
            if not_corrected_times and Re_avg > 1:
                if not isinstance(run_time_therm, type(None)):
                    solver.stop_sim_time = run_time_therm * equations.thermal_time + solver.sim_time
                elif not isinstance(run_time_buoyancy, type(None)):
                    solver.stop_sim_time = run_time_buoyancy + solver.sim_time
                not_corrected_times = False

            if do_bvp:
                bvp_solver.update_avgs(dt, Re_avg, min_Re=np.sqrt(supercrit))
                if bvp_solver.check_if_solve():
                    atmo_kwargs = {'nz': nz * bvp_resolution_factor, 'Lz': Lz}
                    diff_args = [Rayleigh, Prandtl]
                    bvp_solver.solve_BVP(atmo_kwargs, diff_args, bc_dict)
                if bvp_solver.terminate_IVP():
                    continue_bvps = False

            if first_step:
                if verbose:
                    # One-time diagnostics: LHS sparsity pattern and LU fill-in.
                    import matplotlib
                    matplotlib.use('Agg')
                    import matplotlib.pyplot as plt
                    fig = plt.figure()
                    ax = fig.add_subplot(1, 1, 1)
                    ax.spy(solver.pencils[0].L,
                           markersize=1,
                           markeredgewidth=0.0)
                    fig.savefig(data_dir + "sparsity_pattern.png", dpi=1200)

                    import scipy.sparse.linalg as sla
                    LU = sla.splu(solver.pencils[0].LHS.tocsc(),
                                  permc_spec='NATURAL')
                    fig = plt.figure()
                    ax = fig.add_subplot(1, 2, 1)
                    ax.spy(LU.L.A, markersize=1, markeredgewidth=0.0)
                    ax = fig.add_subplot(1, 2, 2)
                    ax.spy(LU.U.A, markersize=1, markeredgewidth=0.0)
                    fig.savefig(data_dir + "sparsity_pattern_LU.png", dpi=1200)

                    logger.info("{} nonzero entries in LU".format(LU.nnz))
                    logger.info("{} nonzero entries in LHS".format(
                        solver.pencils[0].LHS.tocsc().nnz))
                    logger.info("{} fill in factor".format(
                        LU.nnz / solver.pencils[0].LHS.tocsc().nnz))
                first_step = False
                # Restart the clock so startup cost is excluded from loop timing.
                start_time = time.time()
    except:
        # Log before re-raising (the original raised first, which made this
        # log message unreachable).  Nothing is swallowed.
        logger.error('Exception raised, triggering end of main loop.')
        raise
    finally:
        end_time = time.time()
        main_loop_time = end_time - start_time
        n_iter_loop = solver.iteration - 1
        logger.info('Iterations: {:d}'.format(n_iter_loop))
        logger.info('Sim end time: {:f}'.format(solver.sim_time))
        logger.info('Run time: {:f} sec'.format(main_loop_time))
        logger.info('Run time: {:f} cpu-hr'.format(
            main_loop_time / 60 / 60 * equations.domain.dist.comm_cart.size))
        logger.info('iter/sec: {:f} (main loop only)'.format(n_iter_loop /
                                                             main_loop_time))
        try:
            final_checkpoint = Checkpoint(data_dir,
                                          checkpoint_name='final_checkpoint')
            final_checkpoint.set_checkpoint(solver, wall_dt=1, mode=mode)
            solver.step(dt)  #clean this up in the future...works for now.
            post.merge_process_files(data_dir + '/final_checkpoint/',
                                     cleanup=False)
        except Exception:
            # Log before re-raising (the original raised first, which made
            # this message unreachable).
            logger.error('cannot save final checkpoint')
            raise
        finally:
            if not no_join:
                logger.info('beginning join operation')
                post.merge_analysis(data_dir + 'checkpoints')

                for task in analysis_tasks:
                    logger.info(task.base_path)
                    post.merge_analysis(task.base_path)

            logger.info(40 * "=")
            logger.info('Iterations: {:d}'.format(n_iter_loop))
            logger.info('Sim end time: {:f}'.format(solver.sim_time))
            logger.info('Run time: {:f} sec'.format(main_loop_time))
            logger.info('Run time: {:f} cpu-hr'.format(
                main_loop_time / 60 / 60 *
                equations.domain.dist.comm_cart.size))
            logger.info('iter/sec: {:f} (main loop only)'.format(
                n_iter_loop / main_loop_time))
Ejemplo n.º 23
0
# Main loop ------------------------------------------------------------------|
try:
    logger.info('Starting loop')
    start_time = time.time()
    while solver.ok:
        # We start with a very small dt and increase it.
        dt = dtm + (dtM - dtm) * (1 - np.exp(-sigma_dt * solver.sim_time))
        solver.step(dt)
        if (solver.iteration - 1) % 100 == 0:
            logger.info('Ite #{0:d}, Time: {1:.2e} IPs, dt: {2:e}'.format(
                solver.iteration, solver.sim_time / TF, dt))
except Exception:
    # Broadened from `except NameError:`, which let most solver failures end
    # the run without this log message.  Re-raised, so nothing is swallowed.
    logger.error('Exception raised, triggering end of main loop.')
    raise
finally:
    end_time = time.time()
    logger.info('Iterations: {0:d}'.format(solver.iteration))
    logger.info('Sim end time: {0:f}'.format(solver.sim_time))
    logger.info('Run time: {0:.2f} sec'.format(end_time - start_time))
    # cpu-minutes = wall seconds / 60 * cores (was erroneously divided by 30).
    logger.info('Run time: {0:f} cpu-min'.format(
        (end_time - start_time) / 60 * domain.dist.comm_cart.size))

# Post-processing ------------------------------------------------------------|
print(subprocess.check_output("find anim", shell=True).decode())
post.merge_process_files("anim", cleanup=True)  # merges the sub-domains if any
set_paths = list(
    pathlib.Path("anim").glob("anim_s*.h5"))  # finds all of the time series
# merges the time series
post.merge_sets("anim/anim.h5", set_paths, cleanup=True)
print(subprocess.check_output("find anim", shell=True).decode())