Coverage for pySDC / helpers / fieldsIO.py: 75%

308 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-03-27 07:06 +0000

1#!/usr/bin/env python3 

2# -*- coding: utf-8 -*- 

3""" 

4Generic utility class to write and read cartesian grid field solutions into binary files. 

5It implements the base file handler class :class:`FieldsIO`, that is specialized into : 

6 

7- :class:`Scalar` : for 0D fields (scalar) with a given number of variables 

8- :class:`Rectilinear` : for fields on N-dimensional rectilinear grids 

9 

10While each file handler need to be setup with specific parameters (grid, ...), 

11each written file can be read using the same interface implemented in the 

12base abstract class. 

13 

14Example 

15------- 

16>>> import numpy as np 

17>>> from pySDC.helpers.fieldsIO import Rectilinear 

18>>> 

19>>> # Write some fields in files 

20>>> x = np.linspace(0, 1, 128) 

21>>> y = np.linspace(0, 1, 64) 

22>>> fOut = Rectilinear(np.float64, "file.pysdc") 

23>>> fOut.setHeader(nVar=2, coords=[x, y]) 

24>>> fOut.initialize() 

25>>> times = [0, 1, 2] 

26>>> xGrid, yGrid = np.meshgrid(x, y, indexing="ij") 

27>>> u0 = np.array([-1, 1]).reshape((-1, 1, 1))*xGrid*yGrid 

28>>> # u0 has shape [2, nX, nY] 

29>>> for t in times: 

30>>> fOut.addField(t, t*u0) 

31>>> 

32>>> # Read the file using the generic interface 

33>>> from pySDC.helpers.fieldsIO import FieldsIO 

34>>> fIn = FieldsIO.fromFile("file.pysdc") 

35>>> times = fIn.times 

36>>> assert len(times) == fIn.nFields 

37>>> tEnd, uEnd = fIn.readField(-1) 

38>>> assert tEnd == times[-1] 

39 

40Notes 

41----- 

42🚀 :class:`Rectilinear` is compatible with a MPI-based cartesian decomposition. 

43See :class:`pySDC.helpers.fieldsIO.writeFields_MPI` for an illustrative example. 

44 

45Warning 

46------- 

47To use MPI collective writing, you need to call first the class methods :class:`Rectilinear.setupMPI` (cf their docstring). 

48Also, `Rectilinear.setHeader` **must be given the global grids coordinates**, whether the code is run in parallel or not. 

49""" 

50 

51import os 

52import numpy as np 

53from typing import Type, TypeVar 

54import logging 

55 

# Generic type variable, also reused as a dummy placeholder for the MPI
# classes when mpi4py is not installed (see fallback below)
T = TypeVar("T")

try:
    try:
        import dolfin as df  # noqa: F841 (for some reason, dolfin always needs to be imported before mpi4py)
    except ImportError:
        pass
    from mpi4py import MPI
    from mpi4py.util.dtlib import from_numpy_dtype as MPI_DTYPE
except ImportError:
    # Fallback declarations so the module can still be imported (and used in
    # serial mode) without mpi4py : only the names referenced at class
    # definition time (annotations, class attributes) need to exist.

    class MPI:
        COMM_WORLD = None  # serial mode marker, checked via `comm is None`
        Intracomm = T
        File = T
        Datatype = T

    def MPI_DTYPE():
        # never called in serial mode (MPI code paths require an MPI communicator)
        pass

75 

76 

# Supported data types, keyed by the integer ID written in the file header
DTYPES = {
    0: np.float64,  # double precision
    1: np.complex128,
}
try:
    # np.float128 / np.complex256 only exist as numpy attributes on platforms
    # supporting extended precision -> AttributeError otherwise
    DTYPES.update(
        {
            2: np.float128,  # quadruple precision
            3: np.complex256,
        }
    )
except AttributeError:
    logging.getLogger('FieldsIO').debug('Warning: Quadruple precision not available on this machine')
try:
    DTYPES.update(
        {
            4: np.float32,  # single precision
            5: np.complex64,
        }
    )
except AttributeError:
    logging.getLogger('FieldsIO').debug('Warning: Single precision not available on this machine')

# Reverse mapping : numpy dtype -> integer ID stored in the file header
DTYPES_AVAIL = {val: key for key, val in DTYPES.items()}

# Header dtype
H_DTYPE = np.int8  # dtype of the two base header values (structure ID, dtype ID)
T_DTYPE = np.float64  # dtype of the time value stored with each field

106 

107 

class FieldsIO:
    """
    Abstract IO file handler.

    Binary file layout (everything written with numpy ``tofile``):

    - base header : two ``H_DTYPE`` (int8) values, the structure ID ``sID``
      and the data-type ID (key of :data:`DTYPES`),
    - structure header : the arrays returned by :attr:`hInfos`
      (subclass specific),
    - then, for each stored field : one ``T_DTYPE`` (float64) time value,
      followed by the ``nItems`` field values in ``dtype``.
    """

    STRUCTS = {}  # supported structures, modified dynamically (cf. register)
    sID = None  # structure ID of the FieldsIO class, modified dynamically (cf. register)

    tSize = T_DTYPE().itemsize  # size in bytes of one stored time value

    ALLOW_OVERWRITE = False  # if True, initialize() may overwrite an existing file

    def __init__(self, dtype, fileName):
        """
        Parameters
        ----------
        dtype : np.dtype
            The data type of the fields values.
        fileName : str
            Name of the binary file.
        """
        assert dtype in DTYPES_AVAIL, f"{dtype=} not available. Supported on this machine: {list(DTYPES_AVAIL.keys())}"
        self.dtype = dtype
        self.fileName = fileName
        self.initialized = False  # set to True by initialize() or fromFile()

        # Initialized by the setHeader abstract method
        self.header = None
        self.nItems = None  # number of values (dof) stored into one field

    def __str__(self):
        return f"FieldsIO[{self.__class__.__name__}|{self.dtype.__name__}|file:{self.fileName}]<{hex(id(self))}>"

    def __repr__(self):
        return self.__str__()

    @classmethod
    def fromFile(cls, fileName):
        """
        Read a file storing fields, and return the `FieldsIO` of the appropriate
        field type (structure).

        Parameters
        ----------
        fileName : str
            Name of the binary file.

        Returns
        -------
        fieldsIO : :class:`FieldsIO`
            The specialized `FieldsIO` adapted to the file.
        """
        if not os.path.isfile(fileName):
            raise FileNotFoundError(f"not a file ({fileName})")
        with open(fileName, "rb") as f:
            # base header selects the registered subclass and the dtype,
            # the rest of the header is read by the subclass implementation
            STRUCT, DTYPE = np.fromfile(f, dtype=H_DTYPE, count=2)
            fieldsIO: FieldsIO = cls.STRUCTS[STRUCT](DTYPES[DTYPE], fileName)
            fieldsIO.readHeader(f)
            fieldsIO.initialized = True
        return fieldsIO

    @property
    def hBase(self) -> np.ndarray:
        """Base header into numpy array format"""
        return np.array([self.sID, DTYPES_AVAIL[self.dtype]], dtype=H_DTYPE)

    @classmethod
    def register(cls, sID):
        """
        Decorator used to register a new class FieldsIO specialized class

        Parameters
        ----------
        sID : int
            Unique identifier for the file, used in the binary file.
            Since it's encoded on a 8-bit signed integer (int8),
            it must be between -128 and 127

        Example
        -------
        >>> # New specialized FieldsIO class
        >>> @FieldsIO.register(sID=31)
        >>> class HexaMesh2D(FieldsIO):
        >>>     pass # ... implementation
        """

        def wrapper(registered: Type[T]) -> Type[T]:
            assert (
                sID not in cls.STRUCTS
            ), f"struct ID already taken by {cls.STRUCTS[sID]}, cannot use it for {registered}"
            cls.STRUCTS[sID] = registered
            registered.sID = sID
            return registered

        return wrapper

    def initialize(self):
        """Initialize the file handler : create the file with header, removing any existing file with the same name"""
        assert self.header is not None, "header must be set before initializing FieldsIO"
        assert not self.initialized, "FieldsIO already initialized"

        if not self.ALLOW_OVERWRITE:
            if os.path.isfile(self.fileName):
                raise FileExistsError(
                    f"file {self.fileName!r} already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"
                )

        # write base header followed by the structure-specific header arrays
        with open(self.fileName, "w+b") as f:
            self.hBase.tofile(f)
            for array in self.hInfos:
                array.tofile(f)
        self.initialized = True

    def setHeader(self, **params):
        """(Abstract) Set the header before creating a new file to store the fields"""
        raise NotImplementedError()

    @property
    def hInfos(self) -> list[np.ndarray]:
        """(Abstract) Array representing the grid structure to be written in the binary file."""
        raise NotImplementedError()

    def readHeader(self, f):
        """
        (Abstract) Read the header from the file storing the fields.

        Parameters
        ----------
        f : `_io.TextIOWrapper`
            File to read the header from (cursor positioned after the base header).
        """
        raise NotImplementedError()

    @property
    def hSize(self):
        """Size of the full header (in bytes)"""
        return self.hBase.nbytes + sum(hInfo.nbytes for hInfo in self.hInfos)

    @property
    def itemSize(self):
        """Size of one field value (in bytes)"""
        return self.dtype().itemsize

    @property
    def fSize(self):
        """Full size of a field (in bytes)"""
        return self.nItems * self.itemSize

    @property
    def fileSize(self):
        """Current size of the file (in bytes)"""
        return os.path.getsize(self.fileName)

    def addField(self, time, field):
        """
        Append one field solution at the end of the file with one given time.

        Parameters
        ----------
        time : float-like
            The associated time of the field solution.
        field : np.ndarray
            The field values.
        """
        assert self.initialized, "cannot add field to a non initialized FieldsIO"
        field = np.asarray(field)
        assert field.dtype == self.dtype, f"expected {self.dtype} dtype, got {field.dtype}"
        assert field.size == self.nItems, f"expected {self.nItems} values, got {field.size}"
        # append-mode write : one time value, then the raw field values
        with open(self.fileName, "ab") as f:
            np.array(time, dtype=T_DTYPE).tofile(f)
            field.tofile(f)

    @property
    def nFields(self):
        """Number of fields currently stored in the binary file"""
        # deduced from the file size : each record is one time value + one field
        return int((self.fileSize - self.hSize) // (self.tSize + self.fSize))

    def formatIndex(self, idx):
        """Utility method to format a fields index to a positional integer (negative starts from last field index, like python lists)"""
        nFields = self.nFields
        if idx < 0:
            idx = nFields + idx
        assert idx < nFields, f"cannot read index {idx} from {nFields} fields"
        assert idx >= 0, f"cannot read index {idx-nFields} from {nFields} fields"
        return idx

    @property
    def times(self):
        """Vector of all times stored in the binary file"""
        times = []
        with open(self.fileName, "rb") as f:
            f.seek(self.hSize)
            for i in range(self.nFields):
                # offset skips the field values of the previous record
                t = np.fromfile(f, dtype=T_DTYPE, count=1, offset=0 if i == 0 else self.fSize)[0]
                times.append(float(t))
        return times

    def time(self, idx):
        """Time stored at a given field index"""
        idx = self.formatIndex(idx)
        offset = self.hSize + idx * (self.tSize + self.fSize)
        with open(self.fileName, "rb") as f:
            t = np.fromfile(f, dtype=T_DTYPE, count=1, offset=offset)[0]
        return float(t)

    def readField(self, idx):
        """
        Read one field stored in the binary file, corresponding to the given
        time index.

        Parameters
        ----------
        idx : int
            Positional index of the field.

        Returns
        -------
        t : float
            Stored time for this field.
        field : np.ndarray
            Read fields in a numpy array.
        """
        idx = self.formatIndex(idx)
        offset = self.hSize + idx * (self.tSize + self.fSize)
        with open(self.fileName, "rb") as f:
            f.seek(offset)
            t = float(np.fromfile(f, dtype=T_DTYPE, count=1)[0])
            field = np.fromfile(f, dtype=self.dtype, count=self.nItems)
        self.reshape(field)
        return t, field

    def reshape(self, field):
        """Eventually reshape the field to correspond to the grid structure"""
        pass

340 

341 

@FieldsIO.register(sID=0)
class Scalar(FieldsIO):
    """FieldsIO handler storing a given number of scalar"""

    # -------------------------------------------------------------------------
    # Overridden methods
    # -------------------------------------------------------------------------
    def setHeader(self, nVar):
        """
        Set the descriptive grid structure to be stored in the file header.

        Parameters
        ----------
        nVar : int
            Number of scalar variable stored.
        """
        self.header = dict(nVar=int(nVar))
        # one stored value per variable -> nItems equals nVar
        self.nItems = self.nVar

    @property
    def hInfos(self):
        """Array representing the grid structure to be written in the binary file."""
        nVar = self.nVar
        return [np.array([nVar], dtype=np.int64)]

    def readHeader(self, f):
        """
        Read the header from the binary file storing the fields.

        Parameters
        ----------
        f : `_io.TextIOWrapper`
            File to read the header from.
        """
        nVar = np.fromfile(f, dtype=np.int64, count=1)[0]
        self.setHeader(nVar)

    # -------------------------------------------------------------------------
    # Class specifics
    # -------------------------------------------------------------------------
    @property
    def nVar(self):
        """Number of variables in a fields, as described in the header"""
        return self.header["nVar"]

386 

@FieldsIO.register(sID=1)
class Rectilinear(Scalar):
    """FieldsIO handler storing a given number of scalar variables on a N-dimensional rectilinear grid"""

    @staticmethod
    def setupCoords(*coords):
        """Utility function to setup grids in multiple dimensions, given the keyword arguments"""
        coords = [np.asarray(coord, dtype=np.float64) for coord in coords]
        for axis, coord in enumerate(coords):
            assert coord.ndim == 1, f"coord for {axis=} must be one dimensional"
        return coords

    # -------------------------------------------------------------------------
    # Overridden methods
    # -------------------------------------------------------------------------
    def setHeader(self, nVar, coords):
        """
        Set the descriptive grid structure to be stored in the file header.

        Parameters
        ----------
        nVar : int
            Number of 1D variables stored.
        coords : np.1darray or list[np.1darray]
            The grid coordinates in each dimensions.

        Note
        ----
        When used in MPI decomposition mode, all coordinate **must** be the global grid.
        """
        if not isinstance(coords, (tuple, list)):
            coords = [coords]
        coords = self.setupCoords(*coords)
        self.header = {"nVar": int(nVar), "coords": coords}
        # total number of values per field : nVar times the full grid size
        self.nItems = nVar * self.nDoF

    @property
    def hInfos(self):
        """Array representing the grid structure to be written in the binary file."""
        # one int32 array [nVar, dim, n1, ..., nDim] followed by one float64
        # array per dimension holding the grid coordinates
        return [np.array([self.nVar, self.dim, *self.gridSizes], dtype=np.int32)] + [
            np.array(coord, dtype=np.float64) for coord in self.header["coords"]
        ]

    def readHeader(self, f):
        """
        Read the header from the binary file storing the fields.

        Parameters
        ----------
        f : `_io.TextIOWrapper`
            File to read the header from.
        """
        nVar, dim = np.fromfile(f, dtype=np.int32, count=2)
        gridSizes = np.fromfile(f, dtype=np.int32, count=dim)
        coords = [np.fromfile(f, dtype=np.float64, count=n) for n in gridSizes]
        self.setHeader(nVar, coords)

    def reshape(self, fields: np.ndarray):
        """Reshape the fields to a N-d array (inplace operation)"""
        fields.shape = (self.nVar, *self.gridSizes)

    # -------------------------------------------------------------------------
    # Class specifics
    # -------------------------------------------------------------------------
    @property
    def gridSizes(self):
        """Number of grid points in each dimension"""
        return [coord.size for coord in self.header["coords"]]

    @property
    def dim(self):
        """Number of grid dimensions"""
        return len(self.gridSizes)

    @property
    def nDoF(self):
        """Number of degrees of freedom for one variable"""
        return np.prod(self.gridSizes)

    def toVTR(self, baseName, varNames, idxFormat="{:06d}"):
        """
        Convert all 3D fields stored in binary format (FieldsIO) into a list
        of VTR files, that can be read later with Paraview or equivalent to
        make videos.

        Parameters
        ----------
        baseName : str
            Base name of the VTR file.
        varNames : list[str]
            Variable names of the fields.
        idxFormat : str, optional
            Formatting string for the index of the VTR file.
            The default is "{:06d}".

        Example
        -------
        >>> # Suppose the FieldsIO object is already written into outputs.pysdc
        >>> import os
        >>> from pySDC.utils.fieldsIO import Rectilinear
        >>> os.makedirs("vtrFiles")  # to store all VTR files into a subfolder
        >>> Rectilinear.fromFile("outputs.pysdc").toVTR(
        >>>    baseName="vtrFiles/field", varNames=["u", "v", "w", "T", "p"])
        """
        assert self.dim == 3, "can only be used with 3D fields"
        from pySDC.helpers.vtkIO import writeToVTR

        template = f"{baseName}_{idxFormat}"
        for i in range(self.nFields):
            _, u = self.readField(i)
            writeToVTR(template.format(i), u, self.header["coords"], varNames)

    # -------------------------------------------------------------------------
    # MPI-parallel implementation
    # -------------------------------------------------------------------------
    comm: MPI.Intracomm = None  # None (default) means serial mode

    @classmethod
    def setupMPI(cls, comm: MPI.Intracomm, iLoc, nLoc):
        """
        Setup the MPI mode for the files IO, considering a decomposition
        of the 1D grid into contiguous subintervals.

        Parameters
        ----------
        comm : MPI.Intracomm
            The space decomposition communicator.
        iLoc : list[int]
            Starting index of the local sub-domain in the global coordinates.
        nLoc : list[int]
            Number of points in the local sub-domain.
        """
        cls.comm = comm
        cls.iLoc = iLoc
        cls.nLoc = nLoc
        # lazily created MPI handles (cf. MPI_FILE_OPEN / MPI_SETUP_FILETYPE)
        cls.mpiFile: MPI.File = None
        cls.mpiType: MPI.Datatype = None
        cls.mpiFileType: MPI.Datatype = None

    @property
    def MPI_ON(self):
        """Whether or not MPI is activated"""
        if self.comm is None:
            return False
        return self.comm.Get_size() > 1

    @property
    def MPI_ROOT(self):
        """Whether or not the process is MPI Root"""
        if self.comm is None:
            return True
        return self.comm.Get_rank() == 0

    def MPI_SETUP_FILETYPE(self):
        """Setup subarray masks for each processes"""
        self.mpiType = MPI_DTYPE(self.dtype)
        self.mpiFileType = self.mpiType.Create_subarray(
            [self.nVar, *self.gridSizes],  # Global array sizes
            [self.nVar, *self.nLoc],  # Local array sizes
            [0, *self.iLoc],  # Global starting indices of local blocks
        )
        self.mpiFileType.Commit()

    def MPI_FILE_OPEN(self, mode):
        """Open the binary file in MPI mode"""
        amode = {
            "r": MPI.MODE_RDONLY,
            "a": MPI.MODE_WRONLY | MPI.MODE_APPEND,
        }[mode]
        self.mpiFile = MPI.File.Open(self.comm, self.fileName, amode)
        if self.mpiType is None:
            self.MPI_SETUP_FILETYPE()

    def MPI_WRITE(self, data):
        """Write data (np.ndarray) in the binary file in MPI mode, at the current file cursor position."""
        self.mpiFile.Write(data)

    def MPI_WRITE_AT_ALL(self, offset, data: np.ndarray):
        """
        Write data in the binary file in MPI mode, with a given offset
        **relative to the beginning of the file**.

        Parameters
        ----------
        offset : int
            Offset to write at, relative to the beginning of the file, in bytes.
        data : np.ndarray
            Data to be written in the binary file.
        """
        self.mpiFile.Set_view(disp=offset, etype=self.mpiType, filetype=self.mpiFileType)
        self.mpiFile.Write_all(data)

    def MPI_READ_AT_ALL(self, offset, data: np.ndarray):
        """
        Read data from the binary file in MPI mode, with a given offset
        **relative to the beginning of the file**.

        Parameters
        ----------
        offset : int
            Offset to read at, relative to the beginning of the file, in bytes.
        data : np.ndarray
            Array on which to read the data from the binary file.
        """
        self.mpiFile.Set_view(disp=offset, etype=self.mpiType, filetype=self.mpiFileType)
        self.mpiFile.Read_all(data)

    def MPI_FILE_CLOSE(self):
        """Close the binary file in MPI mode"""
        self.mpiFile.Close()
        self.mpiFile = None

    def initialize(self):
        """Initialize the binary file (write header) in MPI mode"""
        # only root writes the header; other ranks wait at the barrier below
        if self.MPI_ROOT:
            try:
                super().initialize()
            except AssertionError as e:
                if self.MPI_ON:
                    print(f"{type(e)}: {e}")
                    self.comm.Abort()
                else:
                    raise e

        if self.MPI_ON:
            self.comm.Barrier()  # Important, should not be removed !
            self.initialized = True

    def addField(self, time, field):
        """
        Append one field solution at the end of the file with one given time,
        possibly using MPI.

        Parameters
        ----------
        time : float-like
            The associated time of the field solution.
        field : np.ndarray
            The (local) field values.

        Note
        ----
        If a MPI decomposition is used, field **must be** the local field values.
        """
        if not self.MPI_ON:
            return super().addField(time, field)

        assert self.initialized, "cannot add field to a non initialized FieldsIO"

        field = np.asarray(field)
        assert field.dtype == self.dtype, f"expected {self.dtype} dtype, got {field.dtype}"
        assert field.shape == (
            self.nVar,
            *self.nLoc,
        ), f"expected {(self.nVar, *self.nLoc)} shape, got {field.shape}"

        offset = self.fileSize
        self.MPI_FILE_OPEN(mode="a")

        # only root writes the time value; all ranks account for it in the offset
        if self.MPI_ROOT:
            self.MPI_WRITE(np.array(time, dtype=T_DTYPE))
        offset += self.tSize
        self.MPI_WRITE_AT_ALL(offset, field)
        self.MPI_FILE_CLOSE()

    def readField(self, idx):
        """
        Read one field stored in the binary file, corresponding to the given
        time index, using MPI in the eventuality of space parallel decomposition.

        Parameters
        ----------
        idx : int
            Positional index of the field.

        Returns
        -------
        t : float
            Stored time for this field.
        field : np.ndarray
            Read (local) fields in a numpy array.

        Note
        ----
        If a MPI decomposition is used, it reads and returns the local fields values only.
        """
        if not self.MPI_ON:
            return super().readField(idx)

        idx = self.formatIndex(idx)
        offset = self.hSize + idx * (self.tSize + self.fSize)
        # time value is read by every rank with plain (non-MPI) file access
        with open(self.fileName, "rb") as f:
            t = float(np.fromfile(f, dtype=T_DTYPE, count=1, offset=offset)[0])
        offset += self.tSize

        field = np.empty((self.nVar, *self.nLoc), dtype=self.dtype)

        self.MPI_FILE_OPEN(mode="r")
        self.MPI_READ_AT_ALL(offset, field)
        self.MPI_FILE_CLOSE()

        return t, field

689 

690 

691# ----------------------------------------------------------------------------------------------- 

692# Utility functions used for testing 

693# ----------------------------------------------------------------------------------------------- 

def initGrid(nVar, gridSizes):
    """
    Build test coordinates and an initial field on a rectilinear grid.

    Parameters
    ----------
    nVar : int
        Number of variables.
    gridSizes : list[int]
        Number of points in each dimension.

    Returns
    -------
    coords : list[np.ndarray]
        1D coordinate vectors (values in [0, 1), endpoint excluded).
    u0 : np.ndarray
        Field of shape (nVar, *gridSizes) : variable k holds (k+1) times the
        product of the grid coordinates.
    """
    coords = [np.linspace(0, 1, num=size, endpoint=False) for size in gridSizes]
    # variable index k carries value k+1, broadcast over all grid dimensions
    expand = (slice(None),) + (None,) * len(gridSizes)
    u0 = np.array(np.arange(nVar) + 1)[expand]
    for mesh in np.meshgrid(*coords, indexing="ij"):
        u0 = u0 * mesh
    return coords, u0

702 

703 

def writeFields_MPI(fileName, dtypeIdx, algo, nSteps, nVar, gridSizes):
    """
    Write a sequence of test fields into a binary file, using an MPI
    cartesian block decomposition of the grid.

    Parameters
    ----------
    fileName : str
        Name of the binary file to write.
    dtypeIdx : int
        Key of the field dtype in DTYPES.
    algo : str
        Block decomposition algorithm name (cf. BlockDecomposition).
    nSteps : int
        Number of time steps (fields) to write.
    nVar : int
        Number of variables.
    gridSizes : list[int]
        Global number of grid points in each dimension.

    Returns
    -------
    u0 : np.ndarray
        The local part of the initial field (before time scaling).
    """
    coords, u0 = initGrid(nVar, gridSizes)

    from mpi4py import MPI
    from pySDC.helpers.blocks import BlockDecomposition
    from pySDC.helpers.fieldsIO import Rectilinear

    comm = MPI.COMM_WORLD
    blocks = BlockDecomposition(comm.Get_size(), gridSizes, algo, comm.Get_rank())
    iLoc, nLoc = blocks.localBounds

    # restrict the global field to the local sub-domain of this rank
    Rectilinear.setupMPI(comm, iLoc, nLoc)
    localSlices = (slice(None), *(slice(begin, begin + size) for begin, size in zip(iLoc, nLoc, strict=True)))
    u0 = u0[localSlices]

    fOut = Rectilinear(DTYPES[dtypeIdx], fileName)
    fOut.setHeader(nVar=nVar, coords=coords)

    u0 = np.asarray(u0, dtype=fOut.dtype)
    fOut.initialize()

    for t in np.arange(nSteps) / nSteps:
        fOut.addField(t, (u0 * t).astype(fOut.dtype))

    return u0

734 

735 

def compareFields_MPI(fileName, u0, nSteps):
    """
    Read back a file written by writeFields_MPI and check every stored field
    (time, shape and values) against the reference local initial field.

    Parameters
    ----------
    fileName : str
        Name of the binary file to read.
    u0 : np.ndarray
        Local part of the reference initial field.
    nSteps : int
        Number of time steps (fields) expected in the file.
    """
    from pySDC.helpers.fieldsIO import FieldsIO

    comm = MPI.COMM_WORLD
    if comm.Get_rank() == 0:
        print("Comparing fields with MPI")

    f2 = FieldsIO.fromFile(fileName)

    for idx, t in enumerate(np.arange(nSteps) / nSteps):
        uRef = u0 * t
        t2, u2 = f2.readField(idx)
        assert t2 == t, f"fields[{idx}] in {f2} has incorrect time ({t2} instead of {t})"
        assert u2.shape == uRef.shape, f"{idx}'s fields in {f2} has incorrect shape"
        assert np.allclose(u2, uRef), f"{idx}'s fields in {f2} has incorrect values"