Actual source code: ex40.c
static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines    petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets             petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                petscpc.h  - preconditioners
*/
#include <petscksp.h>
int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscRandom rctx;    /* random number generator context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, j, Ii, J, m = 8, n = 7, its;
  PetscBool   flg = PETSC_FALSE;
  PetscScalar v;
  PetscMPIInt rank;
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details,
     and the illustrative sketch after MatSetUp() below.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetType(A, MATELEMENTAL));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
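
  /*
     Illustrative sketch (not exercised by this example, which uses the dense
     MATELEMENTAL type): for a sparse AIJ matrix, the preallocation mentioned
     above could be done with the standard preallocation routines, e.g.

       PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
       PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));

     where 5 is a safe per-row estimate for the five-point stencil assembled
     below; exact per-row counts could be passed in place of the NULL arguments.
  */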
  /*
     Assemble the standard 2D five-point Laplacian stencil.  In this example
     only rank 0 generates the entries; PETSc communicates them to the owning
     processes during assembly.
  */
  if (rank == 0) {
    PetscInt M, N;
    PetscCall(MatGetSize(A, &M, &N));
    for (Ii = 0; Ii < M; Ii++) {
      v = -1.0;
      i = Ii / n;
      j = Ii - i * n;
      if (i > 0) {
        J = Ii - n;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      if (i < m - 1) {
        J = Ii + n;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      if (j > 0) {
        J = Ii - 1;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      if (j < n - 1) {
        J = Ii + 1;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
      }
      v = 4.0;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, ADD_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  /* PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE)); */
  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below); see the illustrative sketch after VecDuplicate() below.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));
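
  /*
     Illustrative sketch (assumed alternative, not used in this example): the
     local size could be chosen explicitly instead of PETSC_DECIDE, or vectors
     with a layout compatible with A could be obtained directly from the
     matrix, e.g.

       PetscCall(MatCreateVecs(A, &u, NULL));   (layout taken from the matrix)
       PetscCall(VecDuplicate(u, &b));
       PetscCall(VecDuplicate(u, &x));
  */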
  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution of a vector with all
     elements of 1.0.  Alternatively, using the runtime option
     -random_exact_sol forms a solution vector with random components.
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-random_exact_sol", &flg, NULL));
  if (flg) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
    PetscCall(PetscRandomDestroy(&rctx));
  } else {
    PetscCall(VecSet(u, 1.0));
  }
  PetscCall(MatMult(A, u, b));
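
  /*
     Illustrative sketch (assumption, not exercised here): the range of the
     random entries could be restricted before VecSetRandom() is called, e.g.

       PetscCall(PetscRandomSetInterval(rctx, 0.0, 1.0));

     placed after PetscRandomSetFromOptions() in the branch above.
  */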
  /*
     View the exact solution vector if desired
  */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL));
  if (flg) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
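
  /*
     Illustrative alternative (assumption, not part of this example): the
     exact solution could be written to a file instead of stdout, e.g.

       PetscViewer viewer;
       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "exact_sol.txt", &viewer));
       PetscCall(VecView(u, viewer));
       PetscCall(PetscViewerDestroy(&viewer));

     where "exact_sol.txt" is an arbitrary file name.
  */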
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));
  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options (see the illustrative sketch below).
     - The following statement is optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions().  All of these defaults can be
       overridden at runtime, as indicated below.
  */
  PetscCall(KSPSetTolerances(ksp, 1.e-2 / ((m + 1) * (n + 1)), PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
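
  /*
     Illustrative sketch (assumption, not exercised here): extracting and
     configuring the KSP and PC contexts directly, as described above, could
     look like

       PC pc;
       PetscCall(KSPGetPC(ksp, &pc));
       PetscCall(PCSetType(pc, PCJACOBI));
       PetscCall(KSPSetType(ksp, KSPGMRES));

     Like the tolerance above, such defaults remain overridable at runtime
     through KSPSetFromOptions().
  */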
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
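
  /*
     For example (illustrative, assuming an MPI launcher named mpiexec and an
     executable named ex40):

       mpiexec -n 6 ./ex40 -ksp_type gmres -ksp_rtol 1e-8 -ksp_monitor

     would override the tolerance set above and print the residual norm at
     each iteration.
  */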
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
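
  /*
     Illustrative note (assumption, not part of this example): VecAXPY() above
     overwrites x with the error x - u, so a relative error could be reported
     by also measuring the norm of the exact solution, e.g.

       PetscReal unorm;
       PetscCall(VecNorm(u, NORM_2, &unorm));
       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative error %g\n", (double)(norm / unorm)));
  */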
  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));
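
  /*
     Illustrative sketch (assumption, not exercised here): the PetscFPrintf()
     alternative mentioned above takes an explicit output stream, e.g.

       PetscCall(PetscFPrintf(PETSC_COMM_WORLD, stdout, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

     where stdout could be replaced by any FILE * opened by the application.
  */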
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
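  /*
     For instance (illustrative), running the program as

       mpiexec -n 6 ./ex40 -log_view

     prints a performance summary when PetscFinalize() is reached.
  */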
  PetscCall(PetscFinalize());
  return 0;
}
/*TEST

   test:
      nsize: 6
      args: -pc_type none
      requires: elemental

   test:
      suffix: 2
      nsize: 6
      args: -pc_type lu -pc_factor_mat_solver_type elemental
      requires: elemental

TEST*/