Coverage for pySDC/implementations/sweeper_classes/multi_implicit.py: 86% (59 statements)
from pySDC.core.sweeper import Sweeper


class multi_implicit(Sweeper):
    """
    Custom sweeper class, implements Sweeper.py

    First-order multi-implicit sweeper for two components

    Attributes:
        Q1: implicit integration matrix for the first component
        Q2: implicit integration matrix for the second component
    """

    def __init__(self, params):
        """
        Initialization routine for the custom sweeper

        Args:
            params: parameters for the sweeper
        """
        # Default choice: implicit Euler
        if 'Q1' not in params:
            params['Q1'] = 'IE'
        if 'Q2' not in params:
            params['Q2'] = 'IE'

        # call parent's initialization routine
        super(multi_implicit, self).__init__(params)

        # Integration matrices
        self.Q1 = self.get_Qdelta_implicit(qd_type=self.params.Q1)
        self.Q2 = self.get_Qdelta_implicit(qd_type=self.params.Q2)
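
    # Editorial note (not part of the original class): Q1 and Q2 are
    # lower-triangular "QDelta" approximations of the collocation matrix,
    # one preconditioner per component. Assuming the usual construction,
    # the default 'IE' choice uses the node spacings,
    #     Q1[m, j] = tau_j - tau_{j-1}  for j <= m,  and 0 otherwise,
    # so each node solve in update_nodes() only couples to earlier nodes.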
    def integrate(self):
        """
        Integrates the right-hand side (two components)

        Returns:
            list of dtype_u: containing the integral as values
        """
        # get current level and problem description
        L = self.level
        P = L.prob

        me = []

        # integrate RHS over all collocation nodes
        for m in range(1, self.coll.num_nodes + 1):
            # new instance of dtype_u, initialize values with 0
            me.append(P.dtype_u(P.init, val=0.0))
            for j in range(1, self.coll.num_nodes + 1):
                me[-1] += L.dt * self.coll.Qmat[m, j] * (L.f[j].comp1 + L.f[j].comp2)

        return me
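
    # Editorial note: for the m-th node the loop above evaluates the full
    # collocation quadrature of both components,
    #     me[m-1] ~= dt * sum_j Qmat[m, j] * (f1(u_j) + f2(u_j)),
    # i.e. an approximation of the integral of the right-hand side from t0 to
    # the m-th collocation node.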
    def update_nodes(self):
        """
        Update the u- and f-values at the collocation nodes -> corresponds to a single sweep over all nodes

        Returns:
            None
        """
        # get current level and problem description
        L = self.level
        P = L.prob

        # only if the level has been touched before
        assert L.status.unlocked

        # get number of collocation nodes for easier access
        M = self.coll.num_nodes

        # gather all terms which are known already (e.g. from the previous iteration)

        # get QF(u^k)
        integral = self.integrate()
        for m in range(M):
            # subtract Q1F1(u^k)_m
            for j in range(1, M + 1):
                integral[m] -= L.dt * self.Q1[m + 1, j] * L.f[j].comp1
            # add initial value
            integral[m] += L.u[0]
            # add tau correction, if present
            if L.tau[m] is not None:
                integral[m] += L.tau[m]

        # store Q2F2(u^k) for later usage
        Q2int = []
        for m in range(M):
            Q2int.append(P.dtype_u(P.init, val=0.0))
            for j in range(1, M + 1):
                Q2int[-1] += L.dt * self.Q2[m + 1, j] * L.f[j].comp2
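
        # Editorial note: assuming solve_system_1/2(rhs, factor, u0, t) return
        # u with u - factor * f_i(u) = rhs (as in other pySDC sweepers), each
        # pass of the loop below solves, per node m (1-based), in sequence
        #     u* - dt*Q1[m,m]*f1(u*) = integral[m] + dt*sum_{j<m} Q1[m,j]*f1(u_j^{k+1})
        #     u  - dt*Q2[m,m]*f2(u)  = u* - Q2int[m] + dt*sum_{j<m} Q2[m,j]*f2(u_j^{k+1})
        # i.e. one implicit solve per component, each preconditioned by its own matrix.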
        # do the sweep
        for m in range(0, M):
            # build rhs, consisting of the known values from above and new values from previous nodes (at k+1)
            rhs = P.dtype_u(integral[m])
            for j in range(1, m + 1):
                rhs += L.dt * self.Q1[m + 1, j] * L.f[j].comp1

            # implicit solve with prefactor stemming from Q1
            L.u[m + 1] = P.solve_system_1(
                rhs, L.dt * self.Q1[m + 1, m + 1], L.u[m + 1], L.time + L.dt * self.coll.nodes[m]
            )

            # subtract Q2F2(u^k) and add Q2F2(u^{k+1})
            rhs = L.u[m + 1] - Q2int[m]
            for j in range(1, m + 1):
                rhs += L.dt * self.Q2[m + 1, j] * L.f[j].comp2

            L.u[m + 1] = P.solve_system_2(
                rhs,
                L.dt * self.Q2[m + 1, m + 1],
                L.u[m + 1],  # TODO: is this a good guess?
                L.time + L.dt * self.coll.nodes[m],
            )

            # update function values
            L.f[m + 1] = P.eval_f(L.u[m + 1], L.time + L.dt * self.coll.nodes[m])

        # indicate presence of new values at this level
        L.status.updated = True

        return None
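
    # Editorial note: at the fixed point of this sweep (u^{k+1} = u^k) the Q1
    # and Q2 contributions cancel and every node satisfies the coupled
    # collocation equation u_m = u_0 + dt * sum_j Qmat[m, j] * (f1_j + f2_j)
    # (+ tau_m); a scalar demonstration is sketched at the end of this file.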
    def compute_end_point(self):
        """
        Compute u at the right point of the interval

        The value uend computed here is a full evaluation of the Picard formulation unless do_coll_update==False

        Returns:
            None
        """
        # get current level and problem description
        L = self.level
        P = L.prob

        # if the last node coincides with the right end point and do_coll_update is False, a simple copy suffices
        if self.coll.right_is_node and not self.params.do_coll_update:
            L.uend = P.dtype_u(L.u[-1])
        else:
            # start with u0 and add integral over the full interval (using coll.weights)
            L.uend = P.dtype_u(L.u[0])
            for m in range(self.coll.num_nodes):
                L.uend += L.dt * self.coll.weights[m] * (L.f[m + 1].comp1 + L.f[m + 1].comp2)
            # add up tau correction of the full interval (last entry)
            if L.tau[-1] is not None:
                L.uend += L.tau[-1]

        return None
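

# ---------------------------------------------------------------------------
# Editorial sketch (not part of the original module): a minimal, standalone
# illustration of the multi-implicit sweep for the scalar split test problem
#     u' = lambda_1 * u + lambda_2 * u,
# using a single Radau-right node (tau = 1), for which Qmat = Q1 = Q2 = [[1]].
# The values of lambda_1, lambda_2, dt and u0 are assumed; solve_system_1/2
# reduce to divisions by (1 - factor * lambda_i). None of the pySDC classes
# above are used here.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    lam1, lam2 = -1.0, -2.0  # assumed stiffness of the two components
    dt, u0 = 0.1, 1.0  # assumed step size and initial value
    q = 1.0  # single collocation node at tau = 1: Qmat[1, 1] = Q1[1, 1] = Q2[1, 1] = 1

    u = u0  # initial guess at the node: spread the initial value
    for _ in range(10):  # a few sweeps
        f1, f2 = lam1 * u, lam2 * u
        # known part: u0 + dt*q*(f1 + f2) - dt*q*f1 (no tau correction here)
        integral = u0 + dt * q * f2
        q2int = dt * q * f2  # stored Q2F2(u^k)
        # first implicit solve: u* - dt*q*lam1*u* = integral
        u_star = integral / (1.0 - dt * q * lam1)
        # second implicit solve: u - dt*q*lam2*u = u* - Q2F2(u^k)
        u = (u_star - q2int) / (1.0 - dt * q * lam2)

    # the fixed point is the one-node collocation (implicit Euler) solution
    print(u, u0 / (1.0 - dt * (lam1 + lam2)))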