def main():
    """
    A simple test program to do PFASST runs for the 2D Rayleigh-Benard problem (Dedalus)
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()
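    # Note on the splitting (assuming sys.argv[1] gives the number of processes in space):
    # integer division of the world rank groups consecutive ranks into one space communicator,
    # while the modulo operation groups ranks with the same position across those blocks into
    # one time communicator. A hypothetical launch with
    #   mpirun -np 8 python <this script> 2
    # would thus give 4 ranks in time times 2 ranks in space.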

    print("IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" % (world_rank, world_size, space_rank, space_size,
                                                                        time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 0.0001
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    # sweeper_params['collocation_class'] = EquidistantNoLeft
    sweeper_params['num_nodes'] = [1]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
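    # 'QI' selects the implicit preconditioner of the SDC sweep; 'LU' builds it from an LU
    # decomposition of the collocation matrix (the "LU trick"), which typically helps for stiff problems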
    sweeper_params['initial_guess'] = 'spread'

    # initialize problem parameters
    problem_params = dict()
    problem_params['Ra'] = 1E+05
    problem_params['Pr'] = 1.0
    problem_params['initial'] = 'random'
    problem_params['nvars'] = [(64, 32)]  # number of degrees of freedom for each level
    problem_params['comm'] = space_comm

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 1
    # step_params['errtol'] = 1E-07

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99
    controller_params['hook_class'] = monitor
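    # 'monitor' is a problem-specific hook class (imported elsewhere in the original script)
    # that logs additional quantities into the stats object during the run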
    # controller_params['use_iteration_estimator'] = True

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = rayleighbenard_2d_dedalus
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = dedalus_field_transfer
    # description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 0.1

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params, description=description, comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)


def run_SDC_variant(variant=None):
    """
    Routine to run a particular SDC variant

    Args:
        variant (str): string describing the variant

    Returns:
        stats (dict): statistics of the run
    """

    # load (incomplete) default parameters
    description, controller_params = setup_parameters()

    # add stuff based on variant
    if variant == 'semi-implicit':
        description['problem_class'] = allencahn2d_imex
        description['sweeper_class'] = imex_1st_order
    elif variant == 'semi-implicit-stab':
        description['problem_class'] = allencahn2d_imex_stab
        description['sweeper_class'] = imex_1st_order
    else:
        raise NotImplementedError('Wrong variant specified, got %s' % variant)

    # setup parameters "in time"
    t0 = 0.0
    Tend = 0.02

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    description['problem_params']['comm'] = space_comm
    # set level depending on rank
    controller_params['logger_level'] = controller_params['logger_level'] if space_rank == 0 else 99

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # if time_rank == 0:
    #     plt_helper.plt.imshow(uinit.values)
    #     plt_helper.savefig(f'uinit_{space_rank}', save_pdf=False, save_pgf=False, save_png=True)
    # exit()

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # if time_rank == 0:
    #     plt_helper.plt.imshow(uend.values)
    #     plt_helper.savefig(f'uend_{space_rank}', save_pdf=False, save_pgf=False, save_png=True)
    # exit()

    rank = comm.Get_rank()

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')
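    # sort_stats returns a list of (time, value) tuples, here one iteration count per step, sorted by time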

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    print(f'Mean number of iterations on rank {rank}: {np.mean(niters):.4f}')

    if rank == 0:
        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')

        print(f'---> Time to solution: {timing[0][1]:.4f} sec.')
        print()

    return stats
Example 3
def main():
    """
    A simple test program to do PFASST runs for the heat equation
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1.0 / 4
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    # sweeper_params['spread'] = False

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 0.1  # diffusion coefficient
    problem_params['freq'] = 2  # frequency for the test value
    problem_params['nvars'] = [(16, 16), (4, 4)]  # number of degrees of freedom for each level
    problem_params['comm'] = space_comm

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 10

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    # controller_params['hook_class'] = error_output

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat2d_dedalus_forced
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = dedalus_field_transfer
    # description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # compute exact solution and compare
    uex = P.u_exact(Tend)
    err = abs(uex - uend)
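    # abs() of the pySDC data type returns a norm of the difference, i.e. the error at Tend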

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    if space_rank == 0:

        out = 'This is time-rank %i...' % time_rank
        print(out)
        # compute and print statistics
        for item in iter_counts:
            out = 'Number of iterations for time %4.2f: %2i' % item
            print(out)

        niters = np.array([item[1] for item in iter_counts])
        out = '   Mean number of iterations: %4.2f' % np.mean(niters)
        print(out)
        out = '   Range of values for number of iterations: %2i ' % np.ptp(
            niters)
        print(out)
        out = '   Position of max/min number of iterations: %2i -- %2i' % \
              (int(np.argmax(niters)), int(np.argmin(niters)))
        print(out)
        out = '   Std and var for number of iterations: %4.2f -- %4.2f' % (
            float(np.std(niters)), float(np.var(niters)))
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')

        out = 'Time to solution: %6.4f sec.' % timing[0][1]
        print(out)

        print('Error: %8.4e' % err)
Example 4
def main():

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    if len(sys.argv) == 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)

    space_comm = comm.Split(color=color)
    space_rank = space_comm.Get_rank()
    space_size = space_comm.Get_size()

    if len(sys.argv) == 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)

    time_comm = comm.Split(color=color)
    time_rank = time_comm.Get_rank()
    time_size = time_comm.Get_size()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 0.125
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 1.0  # diffusion coefficient
    problem_params['freq'] = 2  # frequency for the test value
    problem_params['nvars'] = [(127, 127)]  # number of degrees of freedom for each level
    problem_params['comm'] = space_comm
    problem_params['sol_tol'] = 1E-12

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize space transfer parameters
    # space_transfer_params = dict()
    # space_transfer_params['rorder'] = 2
    # space_transfer_params['iorder'] = 2
    # space_transfer_params['periodic'] = True

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20
    # controller_params['hook_class'] = error_output

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat2d_petsc_forced  # pass problem class
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order  # pass sweeper (see part B)
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh_petsc_dmda  # pass spatial transfer class
    # description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)
    # controller = controller_nonMPI(num_procs=2, controller_params=controller_params, description=description)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # compute exact solution and compare
    uex = P.u_exact(Tend)
    err = abs(uex - uend)

    print(err)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    for item in iter_counts:
        out = 'Number of iterations for time %4.2f: %2i' % item
        print(out)

    niters = np.array([item[1] for item in iter_counts])
    out = '   Mean number of iterations: %4.2f' % np.mean(niters)
    print(out)
    out = '   Range of values for number of iterations: %2i ' % np.ptp(niters)
    print(out)
    out = '   Position of max/min number of iterations: %2i -- %2i' % \
          (int(np.argmax(niters)), int(np.argmin(niters)))
    print(out)
    out = '   Std and var for number of iterations: %4.2f -- %4.2f' % (float(
        np.std(niters)), float(np.var(niters)))
    print(out)

    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')

    print(timing)
Example 5
def run_simulation(name=''):
    """
    A simple test program to do PFASST runs for the AC equation
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    # space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    # time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    # print("IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" % (world_rank, world_size, space_rank,
    #                                                                     space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1E-03
    level_params['nsweeps'] = [3, 1]
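    # i.e. three sweeps on the fine level, one sweep on the coarse level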

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['L'] = 1.0
    problem_params['nvars'] = [(128, 128), (32, 32)]
    problem_params['eps'] = [0.04]
    problem_params['dw'] = [-23.6]
    problem_params['radius'] = 0.25
    problem_params['comm'] = space_comm
    problem_params['name'] = name
    problem_params['init_type'] = 'circle'
    problem_params['spectral'] = False

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['hook_class'] = dump

    # fill description dictionary for easy step instantiation
    description = dict()
    # description['problem_class'] = allencahn_imex
    description['problem_class'] = allencahn_imex_timeforcing
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = fft_to_fft

    # set time parameters
    t0 = 0.0
    Tend = 32 * 0.001

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    if space_rank == 0:

        # filter statistics by type (number of iterations)
        filtered_stats = filter_stats(stats, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts = sort_stats(filtered_stats, sortby='time')

        print()

        niters = np.array([item[1] for item in iter_counts])
        out = f'Mean number of iterations on rank {time_rank}: {np.mean(niters):.4f}'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_setup'),
                            sortby='time')
        out = f'Setup time on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')
        out = f'Time to solution on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)

        print()

        # convert filtered statistics to list of computed radii, sorted by time
        computed_radii = sort_stats(filter_stats(stats,
                                                 type='computed_radius'),
                                    sortby='time')
        exact_radii = sort_stats(filter_stats(stats, type='exact_radius'),
                                 sortby='time')

        # print radii and error over time
        for cr, er in zip(computed_radii, exact_radii):
            if er[1] > 0:
                err = abs(cr[1] - er[1]) / er[1]
            else:
                err = 1.0
            out = f'Computed/exact/error radius for time {cr[0]:6.4f}: ' \
                  f'{cr[1]:6.4f} / {er[1]:6.4f} / {err:6.4e}'
            print(out)
Example 6
def main():
    """
    A simple test program to do PFASST runs for the 2D dynamo problem (Dedalus)
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 0.5
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['Rm'] = 4
    problem_params['kz'] = 0.45
    problem_params['initial'] = 'low-res'
    problem_params['nvars'] = [(64, 64)]  # number of degrees of freedom for each level
    problem_params['comm'] = space_comm

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50
    # step_params['errtol'] = 1E-07

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99
    controller_params['hook_class'] = monitor
    # controller_params['use_iteration_estimator'] = True

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = dynamo_2d_dedalus
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = dedalus_field_transfer
    # description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 10.0

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    timings = sort_stats(filter_stats(stats, type='timing_run'),
                         sortby='time')[0][1]
    print(f'Time it took to run the simulation: {timings:6.3f} seconds')

    if space_size == 1:
        bx_maxes = sort_stats(filter_stats(stats, type='bx_max'),
                              sortby='time')
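        # estimate the exponential growth rate from the slope of a linear fit to
        # log(max |bx|) over the second half of the simulated time interval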

        times = [
            t0 + i * level_params['dt']
            for i in range(int((Tend - t0) / level_params['dt']) + 1)
        ]
        half = int(len(times) / 2)
        gr = np.polyfit(times[half::],
                        np.log([item[1] for item in bx_maxes])[half::], 1)[0]
        print("Growth rate: {:.3e}".format(gr))

        plt.figure(3)
        plt.semilogy(times, [item[1] for item in bx_maxes])
        plt.pause(0.1)
Example 7
def run(sweeper_list, MPI_fake=True, controller_comm=MPI.COMM_WORLD, node_comm=None, node_list=None):

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-10

    # This comes as read-in for the step class (this is optional!)
    step_params = dict()
    step_params['maxiter'] = 50

    # This comes as read-in for the problem class
    problem_params = dict()
    problem_params['nu'] = 2
    problem_params['nvars'] = [(256, 256), (128, 128)]
    problem_params['eps'] = [0.04]
    problem_params['newton_maxiter'] = 1  # 50
    problem_params['newton_tol'] = 1E-11
    problem_params['lin_tol'] = 1E-12
    problem_params['lin_maxiter'] = 450  # 0
    problem_params['radius'] = 0.25
    problem_params['comm'] = node_comm  # time_comm / MPI.COMM_WORLD

    # This comes as read-in for the sweeper class
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = 4
    sweeper_params['QI'] = 'LU'
    sweeper_params['fixed_time_in_jacobian'] = 0
    sweeper_params['comm'] = node_comm
    sweeper_params['node_list'] = node_list

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30
    controller_params['hook_class'] = err_reduction_hook

    # Fill description dictionary for easy hierarchy creation
    description = dict()
    description['problem_class'] = AC_jac 
    description['problem_params'] = problem_params
    description['sweeper_params'] = sweeper_params
    description['step_params'] = step_params
    description['space_transfer_class'] = mesh_to_mesh

    #description['space_transfer_class'] = base_transfer_MPI #mesh_to_mesh

    #assert MPI.COMM_WORLD.Get_size() == sweeper_params['num_nodes']

    
    # setup parameters "in time"
    t0 = 0
    Tend = 0.024

    # loop over the different sweepers and check results
    serial = True
    for sweeper in sweeper_list:
        description['sweeper_class'] = sweeper
        error_reduction = []
        for dt in [1e-3]:
            print('Working with sweeper %s and dt = %s...' % (sweeper.__name__, dt))

            level_params['dt'] = dt
            description['level_params'] = level_params

            # instantiate the controller and stuff
            if sweeper.__name__ == 'generic_implicit':
                if controller_comm.Get_size() == 1:
                    controller = controller_nonMPI(num_procs=1, controller_params=controller_params,
                                                   description=description)
                    serial = False
                else:
                    controller = controller_MPI(controller_params=controller_params, description=description,
                                                comm=controller_comm)
            elif sweeper.__name__ in ('linearized_implicit_fixed_parallel_prec_MPI',
                                      'linearized_implicit_fixed_parallel_MPI',
                                      'linearized_implicit_fixed_parallel_prec',
                                      'linearized_implicit_fixed_parallel'):
                # if node_comm is None:
                #     description['base_transfer_class'] = base_transfer
                #     controller = controller_MPI(controller_params=controller_params, description=description,
                #                                 comm=controller_comm)
                # else:
                #     serial = False
                if sweeper.__name__ in ('linearized_implicit_fixed_parallel_prec_MPI',
                                        'linearized_implicit_fixed_parallel_MPI'):
                    description['base_transfer_class'] = base_transfer_MPI
                    controller = controller_MPI(controller_params=controller_params, description=description,
                                                comm=controller_comm)
                else:
                    controller = controller_MPI(controller_params=controller_params, description=description,
                                                comm=controller_comm)
                    # controller = controller_nonMPI(num_procs=1, controller_params=controller_params,
                    #                                description=description)
                    # serial = False
            elif sweeper.__name__ in ('div_linearized_implicit_fixed_parallel_prec_MPI',
                                      'div_linearized_implicit_fixed_parallel_MPI'):
                description['base_transfer_class'] = div_base_transfer_MPI
                controller = controller_MPI(controller_params=controller_params, description=description,
                                            comm=controller_comm)

            if serial:
                P = controller.S.levels[0].prob
            else:
                P = controller.MS[0].levels[0].prob

            # get initial values on finest level
            uinit = P.u_exact(t0)

            
            # call main function to get things done...
            MPI.COMM_WORLD.Barrier()            
            t1 = MPI.Wtime()
            uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)
            t2 = MPI.Wtime()          
            elapsed = t2 - t1
            print("My elapsed time is ", elapsed)
            # reduce to the maximum elapsed time over all ranks
            maxtime = np.zeros(1, dtype='float64')
            local_time = np.array([elapsed], dtype='float64')
            MPI.COMM_WORLD.Allreduce(local_time, maxtime, op=MPI.MAX)
            print("Elapsed max time is ", maxtime[0])


            if True:  # check the solution against a stored reference
                fname = 'ref_24.npz'
                loaded = np.load(fname)
                uref = loaded['uend']
                print("Error vs. reference ", np.linalg.norm(uref - uend.values, np.inf))
                print("Deviation from initial value ", np.linalg.norm(uinit.values - uend.values, np.inf))





            # compute and print statistics
            filtered_stats = filter_stats(stats, type='niter')
            iter_counts = sort_stats(filtered_stats, sortby='time')
            niters = np.array([item[1] for item in iter_counts])
            print("Iterationen SDC ", niters)
            print("Newton Iterationen ", P.newton_itercount)

            maxcount = np.zeros(1, dtype='float64')
            local_count = np.max(P.newton_itercount).astype('float64')
            MPI.COMM_WORLD.Allreduce(local_count,maxcount, op=MPI.MAX)
            print("maxiter ", maxcount[0])


 
            if not serial:
                for pp in controller.MS:
                    print("Newton iterations ", pp.levels[0].prob.newton_itercount)
                    # print("linear iterations ", pp.levels[0].prob.linear_count)
                    # print("warnings ", pp.levels[0].prob.warning_count)
                    # print("time for linear solve ", pp.levels[0].prob.time_for_solve)
            else:
                print("Newton iterations ", controller.S.levels[0].prob.newton_itercount)
                # print("linear iterations ", controller.S.levels[0].prob.linear_count)
                # print("warnings ", controller.S.levels[0].prob.warning_count)
                # print("time for linear solve ", controller.S.levels[0].prob.time_for_solve)
                
            

            MPI.COMM_WORLD.Barrier()     
            print("------------------------------------------------------------------------------------------------------------------------------------------")       
Example 8
def run_variant(nsweeps):
    """
    Routine to run a particular SDC variant

    Args:
        nsweeps: number of sweeps per level, passed on to setup_parameters

    Returns:
        stats (dict): statistics of the run
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 3:
        color = int(world_rank / int(sys.argv[2]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 3:
        color = int(world_rank % int(sys.argv[2]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # load (incomplete) default parameters
    description, controller_params = setup_parameters(nsweeps=nsweeps)

    # setup parameters "in time"
    t0 = 0.0
    Tend = 0.032

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    out = '   Mean number of iterations: %4.2f' % np.mean(niters)
    print(out)
    out = '   Range of values for number of iterations: %2i ' % np.ptp(niters)
    print(out)
    out = '   Position of max/min number of iterations: %2i -- %2i' % \
          (int(np.argmax(niters)), int(np.argmin(niters)))
    print(out)
    out = '   Std and var for number of iterations: %4.2f -- %4.2f' % (float(
        np.std(niters)), float(np.var(niters)))
    print(out)

    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')

    maxtiming = comm.allreduce(sendobj=timing[0][1], op=MPI.MAX)
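    # every rank now knows the maximum (slowest) runtime over the world communicator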

    if time_rank == time_size - 1 and space_rank == 0:
        print('Time to solution: %6.4f sec.' % maxtiming)

    # if time_rank == time_size - 1:
    #     fname = 'data/AC_reference_FFT_Tend{:.1e}'.format(Tend) + '.npz'
    #     loaded = np.load(fname)
    #     uref = loaded['uend']
    #
    #     err = np.linalg.norm(uref - uend.values, np.inf)
    #     print('Error vs. reference solution: %6.4e' % err)
    #     print()

    return stats
Example 9
def main():
    """
    Program to demonstrate usage of PETSc data structures and spatial parallelization,
    combined with parallelization in time.
    """
    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 0.125
    level_params['nsweeps'] = [3, 1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [5]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 1.0  # diffusion coefficient
    problem_params['freq'] = 2  # frequency for the test value
    problem_params['cnvars'] = [(129, 129)]  # number of degrees of freedom on coarse level
    problem_params['refine'] = [1, 0]  # number of refinements
    problem_params['comm'] = space_comm  # pass space-communicator to problem class
    problem_params['sol_tol'] = 1E-10  # set tolerance of PETSc's linear solver

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize space transfer parameters
    space_transfer_params = dict()
    space_transfer_params['rorder'] = 2
    space_transfer_params['iorder'] = 2
    space_transfer_params['periodic'] = False
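    # 'rorder' and 'iorder' above set the restriction and interpolation orders of the spatial transfer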

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['dump_setup'] = False

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat2d_petsc_forced  # pass problem class
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order  # pass sweeper (see part B)
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh_petsc_dmda  # pass spatial transfer class
    description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 3.0

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # compute exact solution and compare
    uex = P.u_exact(Tend)
    err = abs(uex - uend)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    niters = np.array([item[1] for item in iter_counts])

    # limit output to space-rank 0 (as before when setting the logger level)
    if space_rank == 0:

        out = 'This is time-rank %i...' % time_rank
        print(out)

        # compute and print statistics
        for item in iter_counts:
            out = 'Number of iterations for time %4.2f: %2i' % item
            print(out)

        out = '   Mean number of iterations: %4.2f' % np.mean(niters)
        print(out)
        out = '   Range of values for number of iterations: %2i ' % np.ptp(
            niters)
        print(out)
        out = '   Position of max/min number of iterations: %2i -- %2i' % \
              (int(np.argmax(niters)), int(np.argmin(niters)))
        print(out)
        out = '   Std and var for number of iterations: %4.2f -- %4.2f' % (
            float(np.std(niters)), float(np.var(niters)))
        print(out)

        print('   Iteration count linear solver: %i' % P.ksp_itercount)
        print('   Mean Iteration count per call: %4.2f' %
              (P.ksp_itercount / max(P.ksp_ncalls, 1)))

        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')

        out = 'Time to solution: %6.4f sec.' % timing[0][1]
        print(out)
        out = 'Error vs. PDE solution: %6.4e' % err
        print(out)
Example 10
def run_simulation(name=None, nprocs_space=None):
    """
    A simple test program to do PFASST runs for the AC equation
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if nprocs_space is not None:
        color = int(world_rank / nprocs_space)
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if nprocs_space is not None:
        color = int(world_rank % nprocs_space)
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1E-03
    level_params['nsweeps'] = [3, 1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['L'] = 16.0
    problem_params['nvars'] = [(48 * 48, 48 * 48), (8 * 48, 8 * 48)]
    problem_params['eps'] = [0.04]
    problem_params['radius'] = 0.25
    problem_params['TM'] = 1.0
    problem_params['D'] = 0.1
    problem_params['dw'] = [21.0]
    problem_params['comm'] = space_comm
    problem_params['name'] = name
    problem_params['init_type'] = 'circle_rand'
    problem_params['spectral'] = True

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['hook_class'] = dump
    controller_params['predict_type'] = 'fine_only'
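    # 'fine_only' restricts the PFASST predictor to sweeps on the finest level (no coarse-level burn-in)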

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = fft_to_fft
    description['problem_class'] = allencahn_temp_imex

    # set time parameters
    t0 = 0.0
    Tend = 32 * 0.001

    if space_rank == 0 and time_rank == 0:
        out = f'---------> Running {name} with {time_size} process(es) in time and {space_size} process(es) in space...'
        print(out)

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    if space_rank == 0:

        print()

        # convert filtered statistics to list of iterations count, sorted by time
        iter_counts = sort_stats(filter_stats(stats, type='niter'),
                                 sortby='time')

        niters = np.array([item[1] for item in iter_counts])
        out = f'Mean number of iterations on rank {time_rank}: {np.mean(niters):.4f}'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_setup'),
                            sortby='time')
        out = f'Setup time on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')
        out = f'Time to solution on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)
Example 11
from mpi4py import MPI

from pySDC.helpers.stats_helper import filter_stats, sort_stats
from pySDC.implementations.controller_classes.controller_MPI import controller_MPI
from pySDC.tutorial.step_6.A_run_non_MPI_controller import set_parameters_ml

if __name__ == "__main__":
    """
    A simple test program to do MPI-parallel PFASST runs
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    # get parameters from Part A
    description, controller_params, t0, Tend = set_parameters_ml()

    # instantiate controllers
    controller = controller_MPI(controller_params=controller_params, description=description, comm=comm)
    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main functions to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # combine statistics into list of statistics
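    # (rank 0 will then hold the iteration counts of every time rank for combined reporting)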
    iter_counts_list = comm.gather(iter_counts, root=0)


def run_simulation(name=None, nprocs_space=None):
    """
    A simple test program to do PFASST runs for the AC equation
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if nprocs_space is not None:
        color = int(world_rank / nprocs_space)
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_comm.Set_name('Space-Comm')
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if nprocs_space is not None:
        color = int(world_rank % nprocs_space)
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_comm.Set_name('Time-Comm')
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    # print(time_size, space_size, world_size)

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1E-03
    level_params['nsweeps'] = [3, 1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['L'] = 4.0
    # problem_params['L'] = 16.0
    problem_params['nvars'] = [(48 * 12, 48 * 12), (8 * 12, 8 * 12)]
    # problem_params['nvars'] = [(48 * 48, 48 * 48), (8 * 48, 8 * 48)]
    problem_params['eps'] = [0.04]
    problem_params['radius'] = 0.25
    problem_params['comm'] = space_comm
    problem_params['name'] = name
    problem_params['init_type'] = 'circle_rand'
    problem_params['spectral'] = False

    if name == 'AC-bench-constforce':
        problem_params['dw'] = [-23.59]

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30 if space_rank == 0 else 99  # set level depending on rank
    controller_params['predict_type'] = 'fine_only'
    # controller_params['hook_class'] = dump  # activate to get data output at each step

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = fft_to_fft

    if name == 'AC-bench-noforce' or name == 'AC-bench-constforce':
        description['problem_class'] = allencahn_imex
    elif name == 'AC-bench-timeforce':
        description['problem_class'] = allencahn_imex_timeforcing
    else:
        raise NotImplementedError(f'{name} is not implemented')

    # set time parameters
    t0 = 0.0
    Tend = 240 * 0.001

    if space_rank == 0 and time_rank == 0:
        out = f'---------> Running {name} with {time_size} process(es) in time and {space_size} process(es) in space...'
        print(out)

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    timing = sort_stats(filter_stats(stats, type='timing_setup'),
                        sortby='time')
    max_timing_setup = time_comm.allreduce(timing[0][1], MPI.MAX)
    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
    max_timing = time_comm.allreduce(timing[0][1], MPI.MAX)

    if space_rank == 0 and time_rank == time_size - 1:
        print()

        out = f'Setup time: {max_timing_setup:.4f} sec.'
        print(out)

        out = f'Time to solution: {max_timing:.4f} sec.'
        print(out)

        iter_counts = sort_stats(filter_stats(stats, type='niter'),
                                 sortby='time')
        niters = np.array([item[1] for item in iter_counts])
        out = f'Mean number of iterations: {np.mean(niters):.4f}'
        print(out)
Example 13
def main():
    """
    A simple test program to do PFASST runs for the 2D Allen-Cahn equation (Dedalus)
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time):  %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1E-03
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    # sweeper_params['spread'] = False

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 2
    problem_params['L'] = 1.0
    problem_params['nvars'] = [(128, 128), (64, 64)]
    problem_params['eps'] = [0.04]
    problem_params['radius'] = 0.25
    problem_params['comm'] = space_comm

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['hook_class'] = monitor

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = allencahn2d_dedalus
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = dedalus_field_transfer
    # description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 27 * 0.001

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    if space_rank == 0:

        # compute and print statistics
        for item in iter_counts:
            out = 'Number of iterations for time %4.2f: %2i' % item
            print(out)

        niters = np.array([item[1] for item in iter_counts])
        out = f'Mean number of iterations on rank {time_rank}: {np.mean(niters):.4f}'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')

        out = f'Time to solution on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)
Example 14
def run(sweeper_list,
        MPI_fake=True,
        controller_comm=MPI.COMM_WORLD,
        node_comm=None,
        node_list=None):

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1  #0.5
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [4]  #[4]
    sweeper_params['QI'] = ['LU']
    sweeper_params['initial_guess'] = 'zero'
    sweeper_params['fixed_time_in_jacobian'] = 0
    sweeper_params['comm'] = node_comm
    sweeper_params['node_list'] = node_list

    # This comes as read-in for the step class (this is optional!)
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize problem parameters
    problem_params = dict()
    problem_params['D0'] = 1e-4  #1.0
    problem_params['D1'] = 1e-5  #0.01
    problem_params['f'] = 0.0367  #0.09
    problem_params['k'] = 0.0649  #0.086
    problem_params['nvars'] = [(2, 32, 32), (2, 16, 16)]  # [(128, 128), (64, 64)]
    problem_params['nlsol_tol'] = 1E-10
    problem_params['nlsol_maxiter'] = 100
    problem_params['lsol_tol'] = 1E-10
    problem_params['lsol_maxiter'] = 100
    problem_params['newton_maxiter'] = 100
    problem_params['newton_tol'] = 1E-11
    problem_params['lin_tol'] = 1E-12
    problem_params['lin_maxiter'] = 450
    problem_params['comm'] = node_comm

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30
    controller_params['hook_class'] = err_reduction_hook

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = GS_jac  # pass problem class (alternative: grayscott_fullyimplicit)
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = None  # placeholder, set per sweeper in the loop below
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh
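    # mesh_to_mesh transfers the solution between the two spatial resolutions listed in problem_params['nvars']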

    #description['space_transfer_class'] = base_transfer_MPI #mesh_to_mesh

    #assert MPI.COMM_WORLD.Get_size() == sweeper_params['num_nodes']

    # setup parameters "in time"
    t0 = 0
    Tend = 8.
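    # with the default dt = 1 this amounts to 8 time steps per run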

    # loop over the different sweepers and check results
    serial = True
    for sweeper in sweeper_list:
        description['sweeper_class'] = sweeper
        error_reduction = []
        for dt in [1.]:  # alternative step sizes: 0.5, 1e-3
            print('Working with sweeper %s and dt = %s...' %
                  (sweeper.__name__, dt))

            level_params['dt'] = dt
            description['level_params'] = level_params

            # instantiate the controller and stuff
            if sweeper.__name__ == 'generic_implicit':
                if controller_comm.Get_size() == 1:
                    controller = controller_nonMPI(
                        num_procs=1,
                        controller_params=controller_params,
                        description=description)
                    serial = False
                else:
                    controller = controller_MPI(
                        controller_params=controller_params,
                        description=description,
                        comm=controller_comm)
            elif sweeper.__name__ in ('linearized_implicit_fixed_parallel_prec_MPI',
                                      'linearized_implicit_fixed_parallel_MPI',
                                      'linearized_implicit_fixed_parallel_prec',
                                      'linearized_implicit_fixed_parallel'):
                #if(node_comm is None):
                #description['base_transfer_class'] = base_transfer
                #controller = controller_MPI(controller_params=controller_params, description=description, comm=controller_comm)
                #else:
                #    serial==False
                if sweeper.__name__ in ('linearized_implicit_fixed_parallel_prec_MPI',
                                        'linearized_implicit_fixed_parallel_MPI'):
                    description['base_transfer_class'] = base_transfer_MPI
                    controller = controller_MPI(
                        controller_params=controller_params,
                        description=description,
                        comm=controller_comm)
                else:
                    controller = controller_MPI(
                        controller_params=controller_params,
                        description=description,
                        comm=controller_comm)
                    #controller = controller_nonMPI(num_procs=1, controller_params=controller_params, description=description)
                    #serial=False
            elif sweeper.__name__ in ('div_linearized_implicit_fixed_parallel_prec_MPI',
                                      'div_linearized_implicit_fixed_parallel_MPI'):
                description['base_transfer_class'] = div_base_transfer_MPI
                controller = controller_MPI(
                    controller_params=controller_params,
                    description=description,
                    comm=controller_comm)

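            # controller_MPI keeps its single step in controller.S, while
            # controller_nonMPI stores the list of steps in controller.MS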
            if serial:
                P = controller.S.levels[0].prob
            else:
                P = controller.MS[0].levels[0].prob

            # get initial values on finest level
            uinit = P.u_exact(t0)

            # call main function to get things done...
            MPI.COMM_WORLD.Barrier()
            t1 = MPI.Wtime()
            uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)
            t2 = MPI.Wtime()
            time = t2 - t1
            print("My elapsed time is ", time)
            # reduce the local run times to the maximum over all ranks
            local_time = np.array([time], dtype='float64')
            maxtime = np.zeros(1, dtype='float64')
            MPI.COMM_WORLD.Allreduce(local_time, maxtime, op=MPI.MAX)
            print("Elapsed max time is ", maxtime[0])

            if (MPI.COMM_WORLD.Get_rank() == 0):
                #fname = 'neue32referenz.npz'
                #loaded = np.load(fname)
                #uref = loaded['uend']

                #print("erstes", uref.reshape([32,32,2])[14:17,14:17,0]  )
                #print("zweites", uend.values[0,14:17,14:17])
                #print("neue super abweichung vom persc wert ", np.linalg.norm(uref.reshape([32,32,2])[:,:,0]-uend.values[0,:,:], np.inf))
                print(
                    "Abweichung vom Anfangswert ",
                    np.linalg.norm(
                        uinit.values[0, :, :] - uend.values[0, :, :], np.inf))
                np.save("u_values", uend.values[0, :, :])
                np.save("v_values", uend.values[1, :, :])

            if False:  # check the solution against a stored reference (disabled)
                fname = 'ref_24.npz'
                loaded = np.load(fname)
                uref = loaded['uend']
                print("Error ", np.linalg.norm(uref - uend.values, np.inf))
                print("Deviation from the initial value ",
                      np.linalg.norm(uinit.values - uend.values, np.inf))

            #plt.imshow(uend.values[0,:,:], interpolation='bilinear')
            #plt.savefig(str(Tend)+'ruthmesh.pdf')
            #plt.imshow(uref.reshape([32,32,2])[:,:,0], interpolation='bilinear')
            #plt.savefig('endepython.pdf')

            # compute and print statistics
            filtered_stats = filter_stats(stats, type='niter')
            iter_counts = sort_stats(filtered_stats, sortby='time')
            niters = np.array([item[1] for item in iter_counts])
            print("Iterationen SDC ", niters)
            #print("Newton Iterationen ", P.newton_itercount)

            #maxcount = np.zeros(1, dtype='float64')
            #local_count = np.max(P.newton_itercount).astype('float64')
            #MPI.COMM_WORLD.Allreduce(local_count,maxcount, op=MPI.MAX)
            #print("maxiter ", maxcount[0])

            if not serial:
                for pp in controller.MS:
                    print("Newton iterations ", pp.levels[0].prob.newton_itercount)
                    # print("Linear iterations ", pp.levels[0].prob.linear_count)
                    # print("Warnings ", pp.levels[0].prob.warning_count)
                    # print("Time for linear solve ", pp.levels[0].prob.time_for_solve)
            else:
                print("Newton iterations ")  # , controller.S.levels[0].prob.newton_itercount)
                # print("Linear iterations ", controller.S.levels[0].prob.linear_count)
                # print("Warnings ", controller.S.levels[0].prob.warning_count)
                # print("Time for linear solve ", controller.S.levels[0].prob.time_for_solve)

            MPI.COMM_WORLD.Barrier()
            print(140 * '-')  # separator between sweeper/dt runs