Actual source code: ex8.c
static char help[] = "Illustrates use of the ASM (Additive Schwarz Method) preconditioner\n\
for solving a linear system in parallel with KSP. The\n\
code indicates the procedure for setting user-defined subdomains. Input\n\
parameters include:\n\
  -user_set_subdomain_solvers: User explicitly sets subdomain solvers\n\
  -user_set_subdomains: Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian
   and thus is not very interesting for experimenting with variants
   of the ASM preconditioner.
*/

/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u;        /* approx solution, RHS, exact solution */
  Mat         A;              /* linear system matrix */
  KSP         ksp;            /* linear solver context */
  PC          pc;             /* PC context */
  IS         *is, *is_local;  /* array of index sets that define the subdomains */
  PetscInt    overlap = 1;    /* width of subdomain overlap */
  PetscInt    Nsub;           /* number of subdomains */
  PetscInt    m = 15, n = 17; /* mesh dimensions in x- and y- directions */
  PetscInt    M = 2, N = 1;   /* number of subdomains in x- and y- directions */
  PetscInt    i, j, Ii, J, Istart, Iend;
  PetscMPIInt size;
  PetscBool   flg;
  PetscBool   user_subdomains = PETSC_FALSE;
  PetscScalar v, one = 1.0;
  PetscReal   e;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mdomains", &M, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Ndomains", &N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-overlap", &overlap, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_set_subdomains", &user_subdomains, NULL));

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil (as in several other KSP examples)
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Create and set vectors
  */
  PetscCall(MatCreateVecs(A, &u, &b));
  PetscCall(VecDuplicate(u, &x));
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(A, u, b));

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set the default preconditioner for this program to be ASM
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCASM));

  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc, overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
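
  /*
     As a concrete illustration of the basic method, a single-process run
     that requests four blocks with an overlap of 2 and a direct solve on
     each block could look like the following (a hypothetical invocation,
     not part of the test suite below):

         ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2 -sub_pc_type lu

     Here -pc_asm_blocks and -pc_asm_overlap are the runtime counterparts
     of the settings described above, and -sub_pc_type chooses the solver
     used on each block (see the comments on subblock solvers below).
  */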

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
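
  /*
     A minimal sketch of the alternative mentioned above: with the same index
     sets produced by PCASMCreateSubdomains2D() in the code below, one could
     instead set the subdomains for the entire problem at once with

         PetscCall(PCASMSetTotalSubdomains(pc, Nsub, is, is_local));

     This example uses PCASMSetLocalSubdomains(), which sets only the
     subdomains owned by the calling process.
  */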

  if (!user_subdomains) { /* basic version */
    PetscCall(PCASMSetOverlap(pc, overlap));
  } else { /* advanced version */
    PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_SUP, "PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PetscCall(PCASMCreateSubdomains2D(m, n, M, N, 1, overlap, &Nsub, &is, &is_local));
    PetscCall(PCASMSetLocalSubdomains(pc, Nsub, is, is_local));
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-subdomain_view", &flg, NULL));
    if (flg) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mesh points: %" PetscInt_FMT " x %" PetscInt_FMT "; subdomain partition: %" PetscInt_FMT " x %" PetscInt_FMT "; overlap: %" PetscInt_FMT "; Nsub: %" PetscInt_FMT "\n", m, n, M, N, overlap, Nsub));
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "IS:\n"));
      for (i = 0; i < Nsub; i++) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "  IS[%" PetscInt_FMT "]\n", i));
        PetscCall(ISView(is[i], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "IS_local:\n"));
      for (i = 0; i < Nsub; i++) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "  IS_local[%" PetscInt_FMT "]\n", i));
        PetscCall(ISView(is_local[i], PETSC_VIEWER_STDOUT_SELF));
      }
    }
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
         -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
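
  /*
     For instance, to use ILU on every block together with a GMRES inner
     solve in a two-process run, one might invoke (a hypothetical command
     line, assuming an MPI launcher named mpiexec):

         mpiexec -n 2 ./ex8 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4

     This is the command-line counterpart of the per-block settings applied
     in the code below when -user_set_subdomain_solvers is given.
  */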

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_set_subdomain_solvers", &flg, NULL));
  if (flg) {
    KSP      *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  nlocal, first; /* number of local subblocks, first local subblock */
    PC        subpc;         /* PC context for subblock */
    PetscBool isasm;

    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "User explicitly sets subdomain solvers.\n"));

    /*
       Set runtime options
    */
    PetscCall(KSPSetFromOptions(ksp));

    /*
       Flag an error if the PC type was changed by the runtime options
    */
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &isasm));
    PetscCheck(isasm, PETSC_COMM_WORLD, PETSC_ERR_SUP, "Cannot change the PC type when manually changing the subdomain solver settings");

    /*
       Call KSPSetUp() to set up the ASM data structures (including
       the creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    PetscCall(KSPSetUp(ksp));

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PetscCall(PCASMGetSubKSP(pc, &nlocal, &first, &subksp));

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i = 0; i < nlocal; i++) {
      PetscCall(KSPGetPC(subksp[i], &subpc));
      PetscCall(PCSetType(subpc, PCILU));
      PetscCall(KSPSetType(subksp[i], KSPGMRES));
      PetscCall(KSPSetTolerances(subksp[i], 1.e-7, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    }
  } else {
    /*
       Set runtime options
    */
    PetscCall(KSPSetFromOptions(ksp));
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */
  PetscCall(KSPSolve(ksp, b, x));

  /* -------------------------------------------------------------------
                  Compare result to the exact solution
     ------------------------------------------------------------------- */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_INFINITY, &e));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_error", &flg, NULL));
  if (flg) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Infinity norm of the error: %g\n", (double)e));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_subdomains) {
    for (i = 0; i < Nsub; i++) {
      PetscCall(ISDestroy(&is[i]));
      PetscCall(ISDestroy(&is_local[i]));
    }
    PetscCall(PetscFree(is));
    PetscCall(PetscFree(is_local));
  }
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/