Coverage for pySDC/tutorial/step_9/C_paradiag_in_pySDC.py: 100%
88 statements
« prev ^ index » next coverage.py v7.10.4, created at 2025-08-21 06:49 +0000
« prev ^ index » next coverage.py v7.10.4, created at 2025-08-21 06:49 +0000
1"""
2This script shows how to setup ParaDiag in pySDC for two examples and compares performance to single-level PFASST in
3Jacobi mode and serial time stepping.
4In PFASST, we use a diagonal preconditioner, which allows for the same amount of parallelism as ParaDiag.
5We show iteration counts per step here, but both schemes have further concurrency across the nodes.
7We have a linear advection example, discretized with finite differences, where ParaDiag converges in very few iterations.
PFASST, on the other hand, needs a lot more iterations for this hyperbolic problem.
9Note that we did not optimize either setup. With different choice of alpha in ParaDiag, or inexactness and coarsening
10in PFASST, both schemes could be improved significantly.
12Second is the nonlinear van der Pol oscillator. We choose the mu parameter such that the problem is not overly stiff.
13Here, ParaDiag needs many iterations compared to PFASST, but remember that we only perform one Newton iteration per
14ParaDiag iteration. So per node, the number of Newton iterations is equal to the number of ParaDiag iterations.
15In PFASST, on the other hand, we solve the systems to some accuracy and allow more iterations. Here, ParaDiag needs
16fewer Newton iterations per step in total, leaving it with greater speedup. Again, inexactness could improve PFASST.
18This script is not meant to show that one parallelization scheme is better than the other. It does, however, demonstrate
19that both schemes, without optimization, need fewer iterations per task than serial time stepping. Kindly refrain from
20computing parallel efficiency for these examples, however. ;)
21"""
23import numpy as np
24import sys
25from pySDC.helpers.stats_helper import get_sorted
# prepare output
# Module-level file handle used by `my_print` below; note it is opened for writing
# ('w'), so any previous contents are truncated. Not closed explicitly here —
# relies on interpreter exit (or the __main__ block) to flush and close it.
out_file = open('data/step_9_C_out.txt', 'w')
def my_print(*args, **kwargs):
    """Print the given arguments to stdout and mirror them into the module-level output file."""
    # NOTE(review): relies on the module-level `out_file` handle being open
    print(*args, **kwargs, file=sys.stdout)
    print(*args, **kwargs, file=out_file)
def get_description(problem='advection', mode='ParaDiag'):
    """
    Assemble the pySDC description dictionary for the given problem and parallelization mode.

    Args:
        problem (str): 'advection' for the linear finite-difference advection problem or
            'vdp' for the nonlinear van der Pol oscillator
        mode (str): 'ParaDiag' selects the diagonalization-based sweeper; any other value
            selects generic implicit SDC with a diagonal preconditioner

    Returns:
        dict: Description containing problem, sweeper, level and step parameters

    Raises:
        NotImplementedError: If ``problem`` is not one of the known setups
    """
    level_params = {}
    level_params['dt'] = 0.1
    level_params['restol'] = 1e-6

    sweeper_params = {}
    sweeper_params['quad_type'] = 'RADAU-RIGHT'
    sweeper_params['num_nodes'] = 3
    sweeper_params['initial_guess'] = 'copy'

    if mode == 'ParaDiag':
        from pySDC.implementations.sweeper_classes.ParaDiagSweepers import QDiagonalization as sweeper_class

        # we only want to use the averaged Jacobian and do only one Newton iteration per ParaDiag iteration!
    else:
        from pySDC.implementations.sweeper_classes.generic_implicit import generic_implicit as sweeper_class

        # need diagonal preconditioner for same concurrency as ParaDiag
        sweeper_params['QI'] = 'MIN-SR-S'

    if problem == 'advection':
        from pySDC.implementations.problem_classes.AdvectionEquation_ND_FD import advectionNd as problem_class

        problem_params = {'nvars': 64, 'order': 8, 'c': 1, 'solver_type': 'GMRES', 'lintol': 1e-8}
    elif problem == 'vdp':
        from pySDC.implementations.problem_classes.Van_der_Pol_implicit import vanderpol as problem_class

        # need to not raise an error when Newton has not converged because we do only one iteration
        problem_params = {'newton_maxiter': 99, 'crash_at_maxiter': False, 'mu': 1, 'newton_tol': 1e-9}
    else:
        # fail loudly instead of hitting a confusing NameError on `problem_class` below
        raise NotImplementedError(f'Unknown problem {problem!r}, choose \'advection\' or \'vdp\'')

    step_params = {}
    step_params['maxiter'] = 99

    description = {}
    description['problem_class'] = problem_class
    description['problem_params'] = problem_params
    description['sweeper_class'] = sweeper_class
    description['sweeper_params'] = sweeper_params
    description['level_params'] = level_params
    description['step_params'] = step_params

    return description
def get_controller_params(problem='advection', mode='ParaDiag'):
    """Build the controller parameters, including logging hooks, for the chosen problem and mode."""
    from pySDC.implementations.hooks.log_errors import LogGlobalErrorPostRun
    from pySDC.implementations.hooks.log_work import LogWork, LogSDCIterations

    controller_params = {
        'logger_level': 30,
        'hook_class': [LogGlobalErrorPostRun, LogWork, LogSDCIterations],
    }

    if mode == 'ParaDiag':
        controller_params['alpha'] = 1e-4

        # For nonlinear problems, we need to communicate the average solution, which allows to compute the average
        # Jacobian locally. For linear problems, we do not want the extra communication.
        average_jacobians = {'advection': False, 'vdp': True}
        if problem in average_jacobians:
            controller_params['average_jacobians'] = average_jacobians[problem]
    else:
        # We do Block-Jacobi multi-step SDC here. It's a bit silly but it's better for comparing "speedup"
        controller_params['mssdc_jac'] = True

    return controller_params
def run_problem(
    n_steps=4,
    problem='advection',
    mode='ParaDiag',
):
    """Set up the controller for the requested mode and run the problem over ``n_steps`` steps."""
    if mode == 'ParaDiag':
        from pySDC.implementations.controller_classes.controller_ParaDiag_nonMPI import (
            controller_ParaDiag_nonMPI as controller_class,
        )
    else:
        from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI as controller_class

    # serial time stepping uses a single process; the parallel schemes use one per step
    num_procs = 1 if mode == 'serial' else n_steps

    controller = controller_class(
        num_procs=num_procs,
        description=get_description(problem, mode),
        controller_params=get_controller_params(problem, mode),
    )

    # switch the problem datatype to complex in every mode so results stay comparable
    # (presumably required by the diagonalization in ParaDiag — TODO confirm)
    for step in controller.MS:
        level = step.levels[0]
        level.prob.init = (*level.prob.init[:2], np.dtype('complex128'))

    prob = controller.MS[0].levels[0].prob

    t0 = 0.0
    uinit = prob.u_exact(t0)

    uend, stats = controller.run(u0=uinit, t0=t0, Tend=n_steps * controller.MS[0].levels[0].dt)
    return uend, stats
def compare_ParaDiag_and_PFASST(n_steps, problem):
    """
    Run the same problem with ParaDiag, single-level PFASST and serial time stepping and print iteration counts.

    Args:
        n_steps (int): Number of parallel steps
        problem (str): Which problem to run ('advection' or 'vdp')
    """
    my_print(f'Running {problem} with {n_steps} steps')

    uend_PD, stats_PD = run_problem(n_steps, problem, mode='ParaDiag')
    uend_PF, stats_PF = run_problem(n_steps, problem, mode='PFASST')
    uend_S, stats_S = run_problem(n_steps, problem, mode='serial')

    # all three schemes must agree up to the residual tolerance
    assert np.allclose(uend_PD, uend_PF)
    assert np.allclose(uend_S, uend_PD)
    assert (
        abs(uend_PD - uend_PF) > 0
    )  # two different iterative methods should not give identical results for non-zero tolerance

    k_PD = get_sorted(stats_PD, type='k')
    k_PF = get_sorted(stats_PF, type='k')

    my_print(
        f'Needed {max(me[1] for me in k_PD)} ParaDiag iterations and {max(me[1] for me in k_PF)} single-level PFASST iterations'
    )
    if problem == 'advection':
        k_GMRES_PD = get_sorted(stats_PD, type='work_GMRES')
        k_GMRES_PF = get_sorted(stats_PF, type='work_GMRES')
        k_GMRES_S = get_sorted(stats_S, type='work_GMRES')
        my_print(
            f'Maximum GMRES iterations on each step: {max(me[1] for me in k_GMRES_PD)} in ParaDiag, {max(me[1] for me in k_GMRES_PF)} in single-level PFASST and {sum(me[1] for me in k_GMRES_S)} total GMRES iterations in serial'
        )
    elif problem == 'vdp':
        k_Jac_PD = get_sorted(stats_PD, type='work_jacobian_solves')
        k_Jac_PF = get_sorted(stats_PF, type='work_jacobian_solves')
        k_Jac_S = get_sorted(stats_S, type='work_jacobian_solves')
        # fixed typo in the output message: 'Jacabian' -> 'Jacobian'
        my_print(
            f'Maximum Jacobian solves on each step: {max(me[1] for me in k_Jac_PD)} in ParaDiag, {max(me[1] for me in k_Jac_PF)} in single-level PFASST and {sum(me[1] for me in k_Jac_S)} total Jacobian solves in serial'
        )
    my_print()
if __name__ == '__main__':
    # `out_file` is already opened at module level; reopening it here leaked the
    # first file handle, so the redundant open was removed.
    params = {
        'n_steps': 16,
    }
    # compare_ParaDiag_and_PFASST(**params, problem='advection')
    compare_ParaDiag_and_PFASST(**params, problem='vdp')
    out_file.close()