Actual source code: ex18.c
static const char help[] = "Solves a (permuted) linear system in parallel with KSP.\n\
Input parameters include:\n\
  -permute <natural,rcm,nd,...> : solve system in permuted indexing\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscRandom rctx;    /* random number generator context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, j, Ii, J, Istart, Iend, m, n, its;
  PetscBool   random_exact_sol, view_exact_sol, permute;
  char        ordering[256] = MATORDERINGRCM;
  IS          rowperm = NULL, colperm = NULL;
  PetscScalar v;
#if defined(PETSC_USE_LOG)
  PetscLogStage stage;
#endif
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Poisson example options", "");
  {
    m = 8;
    PetscCall(PetscOptionsInt("-m", "Number of grid points in x direction", "", m, &m, NULL));
    n = m - 1;
    PetscCall(PetscOptionsInt("-n", "Number of grid points in y direction", "", n, &n, NULL));
    random_exact_sol = PETSC_FALSE;
    PetscCall(PetscOptionsBool("-random_exact_sol", "Choose a random exact solution", "", random_exact_sol, &random_exact_sol, NULL));
    view_exact_sol = PETSC_FALSE;
    PetscCall(PetscOptionsBool("-view_exact_sol", "View exact solution", "", view_exact_sol, &view_exact_sol, NULL));
    permute = PETSC_FALSE;
    PetscCall(PetscOptionsFList("-permute", "Permute the matrix and vectors to solve the system in a new ordering", "", MatOrderingList, ordering, ordering, sizeof(ordering), &permute));
  }
  PetscOptionsEnd();
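  /*
     For example (an illustrative invocation, not the only valid one), the
     problem size, exact solution, and ordering can all be selected at runtime:

       mpiexec -n 2 ./ex18 -m 16 -n 16 -permute rcm -random_exact_sol
  */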
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  See the matrix chapter of the users manual for details.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));
  PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
  PetscCall(MatSetUp(A));

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
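  /*
     For example (a sketch of the default layout, which PETSc chooses at
     runtime), with m = n = 8 (64 rows) on 2 processes, rank 0 typically
     owns rows [0, 32) and rank 1 rows [32, 64), i.e. [Istart, Iend).
  */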
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this uses the less common natural ordering that orders first
     all the unknowns for x = h, then for x = 2h, etc.; hence you see
     J = Ii +- n instead of J = Ii +- m as you might expect.  The more
     standard ordering would first do all variables for y = h, then y = 2h, etc.
  */
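  /*
     Written out, row Ii = i*n + j of A encodes the classical 5-point stencil

        4 u(i,j) - u(i-1,j) - u(i+1,j) - u(i,j-1) - u(i,j+1)

     with the -1 couplings dropped at the domain boundary, which is exactly
     what the tests on i and j in the loop below implement.
  */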
  PetscCall(PetscLogStageRegister("Assembly", &stage));
  PetscCall(PetscLogStagePush(stage));
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;     /* x-direction grid index of this unknown */
    j = Ii - i * n; /* y-direction grid index of this unknown */
    if (i > 0) { /* neighbor at x - h */
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) { /* neighbor at x + h */
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) { /* neighbor at y - h */
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) { /* neighbor at y + h */
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0; /* diagonal entry */
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogStagePop());
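  /*
     Running with -log_view reports time and flop counts for the "Assembly"
     stage registered above separately from the rest of the computation.
  */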
  /* A is symmetric.  Set symmetric flag to enable ICC/Cholesky preconditioner */
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
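  /*
     With the symmetric flag set, symmetric methods such as -ksp_type cg
     combined with -pc_type icc (both exercised by the tests at the end of
     this file) become natural runtime choices.
  */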
  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));
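  /*
     Alternatively (a sketch, not used in this example), vectors whose
     parallel layout is guaranteed to match the matrix can be obtained
     directly from it:

       PetscCall(MatCreateVecs(A, &u, NULL));
  */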
  /*
     Set exact solution; then compute right-hand-side vector.
     By default the exact solution is a vector with all elements
     equal to 1.0; alternatively, the runtime option -random_exact_sol
     forms a solution vector with random components.
  */
  if (random_exact_sol) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
    PetscCall(PetscRandomDestroy(&rctx));
  } else {
    PetscCall(VecSet(u, 1.0));
  }
  PetscCall(MatMult(A, u, b));

  /*
     View the exact solution vector if desired
  */
  if (view_exact_sol) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

  if (permute) {
    Mat Aperm;
    PetscCall(MatGetOrdering(A, ordering, &rowperm, &colperm));
    PetscCall(MatPermute(A, rowperm, colperm, &Aperm));
    PetscCall(VecPermute(b, rowperm, PETSC_FALSE));
    PetscCall(MatDestroy(&A));
    A = Aperm; /* Replace original operator with permuted version */
  }
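  /*
     To inspect the computed ordering (an optional sketch, not exercised
     here), the index set can be printed:

       PetscCall(ISView(rowperm, PETSC_VIEWER_STDOUT_WORLD));
  */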
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following statement is optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions().  All of these defaults can be
       overridden at runtime, as indicated below.
  */
  PetscCall(KSPSetTolerances(ksp, 1.e-2 / ((m + 1) * (n + 1)), PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
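  /*
     As described above, the PC context can also be extracted and configured
     directly (a sketch; this example instead relies on the defaults and on
     runtime options):

       PC pc;
       PetscCall(KSPGetPC(ksp, &pc));
       PetscCall(PCSetType(pc, PCJACOBI));
  */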
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
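  /*
     For example (an illustrative invocation; these flags override the
     tolerance set above):

       mpiexec -n 4 ./ex18 -ksp_type cg -ksp_rtol 1e-8 -ksp_monitor_short
  */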
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                        Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                        Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (permute) PetscCall(VecPermute(x, colperm, PETSC_TRUE)); /* invert the permutation so x is in the original ordering */

  /*
     Check the error
  */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
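  /*
     A more robust check (an optional sketch) would also query why the
     solver stopped:

       KSPConvergedReason reason;
       PetscCall(KSPGetConvergedReason(ksp, &reason));
  */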
  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      args: -m 39 -n 18 -ksp_monitor_short -permute nd
      requires: !single

   test:
      suffix: 2
      nsize: 3
      args: -m 39 -n 18 -ksp_monitor_short -permute rcm
      requires: !single

   test:
      suffix: 3
      nsize: 3
      args: -m 13 -n 17 -ksp_monitor_short -ksp_type cg -ksp_cg_single_reduction
      requires: !single

   test:
      suffix: bas
      args: -m 13 -n 17 -ksp_monitor_short -ksp_type cg -pc_type icc -pc_factor_mat_solver_type bas -ksp_view -pc_factor_levels 1
      filter: grep -v "variant "
      requires: !single

TEST*/