Coverage for pySDC/projects/AllenCahn_Bayreuth/run_temp_forcing_reference.py: 0%

74 statements  

« prev     ^ index     » next       coverage.py v7.5.0, created at 2024-04-29 09:02 +0000

from argparse import ArgumentParser

import numpy as np
from mpi4py import MPI

from pySDC.helpers.stats_helper import get_sorted
from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI
from pySDC.implementations.sweeper_classes.imex_1st_order import imex_1st_order
from pySDC.implementations.problem_classes.AllenCahn_Temp_MPIFFT import allencahn_temp_imex
from pySDC.projects.AllenCahn_Bayreuth.AllenCahn_dump import dump

12 

13 

def run_simulation(name='', spectral=None, nprocs_space=None):
    """
    A test program to create reference data for the AC equation with temporal forcing.

    Sets up a single-level IMEX SDC run of the Allen-Cahn problem with temperature
    forcing (128x128, serial in time), runs it to Tend=0.001 and prints iteration
    counts and timings on the space-root rank.

    Args:
        name (str): name of the run, will be used to distinguish different setups
        spectral (bool): run in real or spectral space
        nprocs_space (int): number of processors in space (None if serial)
    """
    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    # NOTE: use integer floor division instead of int(world_rank / nprocs_space);
    # the float round-trip can misassign colors for very large rank counts and
    # int(world_rank / 1) in the serial case was a no-op in disguise
    if nprocs_space is not None:
        color = world_rank // nprocs_space
    else:
        color = world_rank
    space_comm = comm.Split(color=color)
    space_rank = space_comm.Get_rank()
    space_size = space_comm.Get_size()

    assert world_size == space_size, 'This script cannot run parallel-in-time with MPI, only spatial parallelism'

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1e-12
    level_params['dt'] = 1e-06
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['quad_type'] = 'RADAU-RIGHT'
    sweeper_params['num_nodes'] = [7]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'spread'

    # initialize problem parameters
    problem_params = dict()
    problem_params['L'] = 1.0
    problem_params['nvars'] = [(128, 128)]
    problem_params['eps'] = [0.04]
    problem_params['radius'] = 0.25
    problem_params['TM'] = 1.0
    problem_params['D'] = 0.1
    problem_params['dw'] = [21.0]
    problem_params['comm'] = space_comm
    problem_params['init_type'] = 'circle'
    problem_params['spectral'] = spectral

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['hook_class'] = dump  # hook dumps solution data during the run

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['problem_class'] = allencahn_temp_imex

    # set time parameters
    t0 = 0.0
    Tend = 1 * 0.001

    if space_rank == 0:
        out = f'---------> Running {name} with spectral={spectral} and {space_size} process(es) in space...'
        print(out)

    # instantiate controller (num_procs=1: no parallelism in time)
    controller = controller_nonMPI(num_procs=1, controller_params=controller_params, description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # report statistics on the space-root rank only
    if space_rank == 0:
        print()

        # convert filtered statistics of iterations count, sorted by time
        iter_counts = get_sorted(stats, type='niter', sortby='time')
        niters = np.mean(np.array([item[1] for item in iter_counts]))
        out = f'Mean number of iterations: {niters:.4f}'
        print(out)

        # get setup time
        timing = get_sorted(stats, type='timing_setup', sortby='time')
        out = f'Setup time: {timing[0][1]:.4f} sec.'
        print(out)

        # get running time
        timing = get_sorted(stats, type='timing_run', sortby='time')
        out = f'Time to solution: {timing[0][1]:.4f} sec.'
        print(out)

        out = '...Done <---------\n'
        print(out)

    # release the split communicator created above
    space_comm.Free()

126 

127 

def main(nprocs_space=None):
    """
    Little helper routine to run the whole thing.

    Args:
        nprocs_space (int): number of processors in space (None if serial)
    """
    run_simulation(name='AC-reference-tempforce', spectral=False, nprocs_space=nprocs_space)
    # spectral-space variant, disabled by default:
    # run_simulation(name='AC-reference-tempforce', spectral=True, nprocs_space=nprocs_space)

139 

140 

if __name__ == "__main__":
    # Add parser to get number of processors in space (have to do this here to enable automatic testing)
    arg_parser = ArgumentParser()
    arg_parser.add_argument("-n", "--nprocs_space", help='Specifies the number of processors in space', type=int)
    parsed = arg_parser.parse_args()

    main(nprocs_space=parsed.nprocs_space)