Coverage for pySDC/projects/AllenCahn_Bayreuth/AllenCahn_dump.py: 0%
91 statements
« prev ^ index » next coverage.py v7.6.7, created at 2024-11-16 14:51 +0000
« prev ^ index » next coverage.py v7.6.7, created at 2024-11-16 14:51 +0000
1import numpy as np
2import json
3from mpi4py import MPI
4from mpi4py_fft import newDistArray
5from pySDC.core.hooks import Hooks
8class dump(Hooks):
9 def __init__(self):
10 """
11 Initialization of Allen-Cahn monitoring
12 """
13 super(dump, self).__init__()
15 self.comm = None
16 self.rank = None
17 self.size = None
18 self.amode = MPI.MODE_WRONLY | MPI.MODE_CREATE
19 self.time_step = None
21 def pre_run(self, step, level_number):
22 """
23 Overwrite standard pre run hook
25 Args:
26 step (pySDC.Step.step): the current step
27 level_number (int): the current level number
28 """
29 super(dump, self).pre_run(step, level_number)
30 L = step.levels[0]
32 # get space-communicator and data
33 self.comm = L.prob.params.comm
34 if self.comm is not None:
35 self.rank = self.comm.Get_rank()
36 self.size = self.comm.Get_size()
37 else:
38 self.rank = 0
39 self.size = 1
41 # get real space values
42 if L.prob.params.spectral:
43 if hasattr(L.prob, 'ncomp'):
44 tmp1 = newDistArray(L.prob.fft, False)
45 tmp = np.zeros(tmp1.shape + (L.prob.ncomp,))
46 for i in range(L.prob.ncomp):
47 tmp[..., i] = L.prob.fft.backward(L.u[0][..., i])
48 else:
49 tmp = L.prob.fft.backward(L.u[0])
50 else:
51 tmp = L.u[0][:]
53 # compute local offset for I/O
54 nbytes_local = tmp.nbytes
55 if self.comm is not None:
56 nbytes_global = self.comm.allgather(nbytes_local)
57 else:
58 nbytes_global = [nbytes_local]
59 local_offset = sum(nbytes_global[: self.rank])
61 # dump initial data
62 fname = f"./data/{L.prob.params.name}_{0:08d}"
63 fh = MPI.File.Open(self.comm, fname + ".dat", self.amode)
64 fh.Write_at_all(local_offset, tmp)
65 fh.Close()
67 sizes = list(L.prob.params.nvars)
68 if hasattr(L.prob, 'ncomp'):
69 sizes.append(L.prob.ncomp)
71 # write json description
72 if self.rank == 0 and step.status.slot == 0:
73 json_obj = dict()
74 json_obj['type'] = 'dataset'
75 json_obj['datatype'] = str(tmp.dtype)
76 json_obj['endian'] = str(tmp.dtype.byteorder)
77 json_obj['time'] = L.time
78 json_obj['space_comm_size'] = self.size
79 json_obj['time_comm_size'] = step.status.time_size
80 json_obj['shape'] = sizes
81 json_obj['elementsize'] = tmp.dtype.itemsize
83 with open(fname + '.json', 'w') as fp:
84 json.dump(json_obj, fp)
86 # set step count
87 self.time_step = 1
89 def post_step(self, step, level_number):
90 """
91 Overwrite standard post step hook
93 Args:
94 step (pySDC.Step.step): the current step
95 level_number (int): the current level number
96 """
97 super(dump, self).post_step(step, level_number)
99 # some abbreviations
100 L = step.levels[0]
102 # get real space values
103 if L.prob.params.spectral:
104 if hasattr(L.prob, 'ncomp'):
105 tmp1 = newDistArray(L.prob.fft, False)
106 tmp = np.zeros(tmp1.shape + (L.prob.ncomp,))
107 for i in range(L.prob.ncomp):
108 tmp[..., i] = L.prob.fft.backward(L.uend[..., i])
109 else:
110 tmp = L.prob.fft.backward(L.uend)
111 else:
112 tmp = L.uend[:]
114 # compute local offset for I/O
115 nbytes_local = tmp.nbytes
116 if self.comm is not None:
117 nbytes_global = self.comm.allgather(nbytes_local)
118 else:
119 nbytes_global = [nbytes_local]
120 local_offset = sum(nbytes_global[: self.rank])
122 # dump data
123 fname = f"./data/{L.prob.params.name}_{self.time_step + step.status.slot:08d}"
124 fh = MPI.File.Open(self.comm, fname + ".dat", self.amode)
125 fh.Write_at_all(local_offset, tmp)
126 fh.Close()
128 sizes = list(L.prob.params.nvars)
129 if hasattr(L.prob, 'ncomp'):
130 sizes.append(L.prob.ncomp)
132 # write json description
133 if self.rank == 0:
134 json_obj = dict()
135 json_obj['type'] = 'dataset'
136 json_obj['datatype'] = str(tmp.dtype)
137 json_obj['endian'] = str(tmp.dtype.byteorder)
138 json_obj['time'] = L.time + L.dt
139 json_obj['space_comm_size'] = self.size
140 json_obj['time_comm_size'] = step.status.time_size
141 json_obj['shape'] = sizes
142 json_obj['elementsize'] = tmp.dtype.itemsize
144 with open(fname + '.json', 'w') as fp:
145 json.dump(json_obj, fp)
147 # update step count
148 self.time_step += step.status.time_size