pySDC/tutorial/step_7/E_pySDC_with_Firedrake.py

1""" 

2Simple example running a forced heat equation in Firedrake. 

3 

4The function `setup` generates the description and controller_params dictionaries needed to run SDC with diagonal preconditioner. 

5This proceeds very similar to earlier tutorials. The interesting part of this tutorial is rather in the problem class. 

6See `pySDC/implementations/problem_classes/HeatFiredrake` for an easy example of how to use Firedrake within pySDC. 

7 

8The script allows to run in three different ways. Use 

9 - `python E_pySDC_with_Firedrake.py` for single-level serial SDC 

10 - `mpiexec -np 3 E_pySDC_with_Firedrake --useMPIsweeper` for single-level MPI-parallel diagonal SDC 

11 - `python E_pySDC_with_Firedrake --ML` for three-level serial SDC 

12 

13You should notice that speedup of MPI parallelisation is quite good and that, while multi-level SDC reduces the number 

14of SDC iterations quite a bit, it does not reduce time to solution in this case. This is partly due to more solvers being 

15constructed when using coarse levels. Also, we do not claim to have found the best parameters, though. This is just an 

16example to demonstrate how to use it. 

17""" 

18 

import numpy as np
from mpi4py import MPI


def setup(useMPIsweeper):
    """
    Helper routine to set up parameters

    Returns:
        description and controller_params parameter dictionaries
    """
    from pySDC.implementations.problem_classes.HeatFiredrake import Heat1DForcedFiredrake
    from pySDC.implementations.sweeper_classes.imex_1st_order import imex_1st_order
    from pySDC.implementations.sweeper_classes.imex_1st_order_MPI import imex_1st_order_MPI
    from pySDC.implementations.hooks.log_errors import LogGlobalErrorPostRun
    from pySDC.implementations.hooks.log_work import LogWork
    from pySDC.helpers.firedrake_ensemble_communicator import FiredrakeEnsembleCommunicator

    # setup space-time parallelism via ensemble for Firedrake, see https://www.firedrakeproject.org/firedrake/parallelism.html
    num_nodes = 3
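    # the second argument sets the size of each spatial communicator, so running with
    # `mpiexec -np 3` gives three single-rank ensemble members, one per collocation node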
    ensemble = FiredrakeEnsembleCommunicator(MPI.COMM_WORLD, max([MPI.COMM_WORLD.size // num_nodes, 1]))

    level_params = dict()
    level_params['restol'] = 5e-10
    level_params['dt'] = 0.2

    step_params = dict()
    step_params['maxiter'] = 20

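    # 'MIN-SR-S' is a diagonal preconditioner for the implicit part, so the node solves are
    # independent and can be distributed across the ensemble; 'PIC' (Picard) uses no
    # preconditioner for the explicit part, which is trivially diagonal as well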
    sweeper_params = dict()
    sweeper_params['quad_type'] = 'RADAU-RIGHT'
    sweeper_params['num_nodes'] = num_nodes
    sweeper_params['QI'] = 'MIN-SR-S'
    sweeper_params['QE'] = 'PIC'
    sweeper_params['comm'] = ensemble

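    # nu is the diffusion coefficient and n the spatial resolution; see the problem class
    # `Heat1DForcedFiredrake` for the meaning of the remaining parameters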
    problem_params = dict()
    problem_params['nu'] = 0.1
    problem_params['n'] = 128
    problem_params['c'] = 1.0
    problem_params['comm'] = ensemble.space_comm

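    # verbose logging (level 15) on the global root rank only; all other ranks report
    # warnings (level 30) and above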
    controller_params = dict()
    controller_params['logger_level'] = 15 if MPI.COMM_WORLD.rank == 0 else 30
    controller_params['hook_class'] = [LogGlobalErrorPostRun, LogWork]

    description = dict()
    description['problem_class'] = Heat1DForcedFiredrake
    description['problem_params'] = problem_params
    description['sweeper_class'] = imex_1st_order_MPI if useMPIsweeper else imex_1st_order
    description['sweeper_params'] = sweeper_params
    description['level_params'] = level_params
    description['step_params'] = step_params

    return description, controller_params


def setup_ML():
    """
    Helper routine to set up parameters

    Returns:
        description and controller_params parameter dictionaries
    """
    from pySDC.implementations.problem_classes.HeatFiredrake import Heat1DForcedFiredrake
    from pySDC.implementations.sweeper_classes.imex_1st_order import imex_1st_order
    from pySDC.implementations.sweeper_classes.imex_1st_order_MPI import imex_1st_order_MPI
    from pySDC.implementations.transfer_classes.TransferFiredrakeMesh import MeshToMeshFiredrake
    from pySDC.implementations.hooks.log_errors import LogGlobalErrorPostRun
    from pySDC.implementations.hooks.log_work import LogWork
    from pySDC.helpers.firedrake_ensemble_communicator import FiredrakeEnsembleCommunicator

    level_params = dict()
    level_params['restol'] = 5e-10
    level_params['dt'] = 0.2

    step_params = dict()
    step_params['maxiter'] = 20

    sweeper_params = dict()
    sweeper_params['quad_type'] = 'RADAU-RIGHT'
    sweeper_params['num_nodes'] = 3
    sweeper_params['QI'] = 'MIN-SR-S'
    sweeper_params['QE'] = 'PIC'

    problem_params = dict()
    problem_params['nu'] = 0.1
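    # a list-valued parameter creates one level per entry, finest first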
    problem_params['n'] = [128, 32, 4]
    problem_params['c'] = 1.0

    base_transfer_params = dict()
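    # `finter` changes how values are interpolated back to the fine level during prolongation;
    # see pySDC's base transfer class for details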
    base_transfer_params['finter'] = True

    controller_params = dict()
    controller_params['logger_level'] = 15 if MPI.COMM_WORLD.rank == 0 else 30
    controller_params['hook_class'] = [LogGlobalErrorPostRun, LogWork]

    description = dict()
    description['problem_class'] = Heat1DForcedFiredrake
    description['problem_params'] = problem_params
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params
    description['level_params'] = level_params
    description['step_params'] = step_params
    description['space_transfer_class'] = MeshToMeshFiredrake
    description['base_transfer_params'] = base_transfer_params

    return description, controller_params


def runHeatFiredrake(useMPIsweeper=False, ML=False):
    """
    Run the example defined by the above parameters
    """
    from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI
    from pySDC.helpers.stats_helper import get_sorted

    Tend = 1.0
    t0 = 0.0

    if ML:
        assert not useMPIsweeper, 'MPI parallel diagonal SDC and ML SDC are not compatible at the moment'
        description, controller_params = setup_ML()
    else:
        description, controller_params = setup(useMPIsweeper)

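    # non-MPI controller holding a single step in time: any parallelism here is across the
    # collocation nodes via the MPI sweeper, not across time steps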
    controller = controller_nonMPI(num_procs=1, controller_params=controller_params, description=description)

    # get initial values
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(0.0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # see what we get
    error = get_sorted(stats, type='e_global_post_run')
    work_solver_setup = get_sorted(stats, type='work_solver_setup')
    work_solves = get_sorted(stats, type='work_solves')
    work_rhs = get_sorted(stats, type='work_rhs')
    niter = get_sorted(stats, type='niter')

    tot_iter = np.sum([me[1] for me in niter])
    tot_solver_setup = np.sum([me[1] for me in work_solver_setup])
    tot_solves = np.sum([me[1] for me in work_solves])
    tot_rhs = np.sum([me[1] for me in work_rhs])

    time_rank = description['sweeper_params']['comm'].rank if useMPIsweeper else 0
    print(
        f'Finished with error {error[0][1]:.2e}. Used {tot_iter} SDC iterations, with {tot_solver_setup} solver setups, {tot_solves} solves and {tot_rhs} right hand side evaluations on the finest level of time task {time_rank}.'
    )

    # check that we got the same results as in previous runs (regression tests)
    n_nodes = 1 if useMPIsweeper else description['sweeper_params']['num_nodes']
    assert error[0][1] < 2e-8
    assert tot_iter == (10 if ML else 29)  # parentheses needed: without them the assert is vacuous for ML=False
    assert tot_solver_setup == n_nodes
    assert tot_solves == n_nodes * tot_iter
    assert tot_rhs == n_nodes * tot_iter + (n_nodes + 1) * len(niter)


if __name__ == "__main__":
    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument(
        '--ML',
        help='Whether you want to run multi-level',
        default=False,
        required=False,
        action='store_const',
        const=True,
    )
    parser.add_argument(
        '--useMPIsweeper',
        help='Whether you want to use MPI parallel diagonal SDC',
        default=False,
        required=False,
        action='store_const',
        const=True,
    )

    args = parser.parse_args()

    runHeatFiredrake(**vars(args))