Actual source code: ex67.c
static char help[] = "Krylov methods to solve u'' = f in parallel with periodic boundary conditions,\n\
with a singular, inconsistent system.\n\n";
/*
   This tests solving singular inconsistent systems with GMRES

   Default: solves a symmetric system
   -symmetric false: solves a non-symmetric system where nullspace(A) != nullspace(A')

   -ksp_pc_side left or right

   See the manual page for KSPSolve() for a discussion of when right preconditioning with
   nullspace(A) != nullspace(A') can fail to produce the norm-minimizing solution.

   Note that although this example does solve the system with right preconditioning and
   nullspace(A) != nullspace(A'), it does not produce the norm-minimizing solution; that is,
   the computed solution is not orthogonal to nullspace(A).

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" (which in turn includes "petscksp.h") so that we can
   use KSP solvers. Note that these files automatically include:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscpc.h     - preconditioners
     petscviewer.h - viewers
*/
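
/*
   Added background note (not in the original source): if b has a component outside
   range(A), no exact solution of A x = b exists. The norm-minimizing least-squares
   solution x satisfies

       A x = P b,   with x orthogonal to nullspace(A),

   where P is the orthogonal projection onto range(A) = nullspace(A')^perp. In the
   symmetric case nullspace(A') = nullspace(A), so stripping the nullspace component
   from b (which KSP does automatically once MatSetNullSpace() has been called)
   performs exactly this projection.
*/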
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>
#include <petsc/private/kspimpl.h>

PetscBool symmetric = PETSC_TRUE;

PetscErrorCode FormMatrix(Mat, void *);
PetscErrorCode FormRightHandSide(Vec, void *);

int main(int argc, char **argv)
{
  KSP          ksp;
  Mat          J;
  DM           da;
  Vec          x, r; /* vectors */
  PetscInt     M = 10;
  MatNullSpace constants, nconstants;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &M, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-symmetric", &symmetric, NULL));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create linear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures; set right-hand-side evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, M, 1, 2, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
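
  /*
     Added note: the DMDACreate1d() arguments above are the communicator, the
     boundary type (periodic), the global number of grid points M, one degree
     of freedom per node, a stencil width of 2, and NULL to let PETSc decide
     how many points each process owns.
  */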
  /*
     Extract a global vector from the DMDA; then duplicate it for the
     right-hand side
  */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));
  /*
     Fill the right-hand-side vector.
     - Note that the final routine argument is the user-defined context (here
       the DMDA) that provides application-specific data to the evaluation
       routine.
  */
  PetscCall(FormRightHandSide(r, da));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateMatrix(da, &J));
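
  /*
     Added note: MatNullSpaceCreate(comm, has_constants, n, vecs, &sp) builds a
     null space object; passing PETSC_TRUE with no vectors, as below, declares
     that the null space is spanned by the constant vector.
  */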
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &constants));
  if (symmetric) {
    PetscCall(MatSetOption(J, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(MatSetOption(J, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
  } else {
    Vec         n;
    PetscInt    zero  = 0;
    PetscScalar zeros = 0.0;

    PetscCall(VecDuplicate(x, &n));
    /* nullspace(A') is spanned by the constant vector with a zero in the very first entry;
       hence nullspace(A') != nullspace(A) */
    PetscCall(VecSet(n, 1.0));
    PetscCall(VecSetValues(n, 1, &zero, &zeros, INSERT_VALUES));
    PetscCall(VecAssemblyBegin(n));
    PetscCall(VecAssemblyEnd(n));
    PetscCall(VecNormalize(n, NULL));
    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_FALSE, 1, &n, &nconstants));
    PetscCall(MatSetTransposeNullSpace(J, nconstants));
    PetscCall(MatNullSpaceDestroy(&nconstants));
    PetscCall(VecDestroy(&n));
  }
  PetscCall(MatSetNullSpace(J, constants));
  PetscCall(FormMatrix(J, da));
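
  /*
     Added sketch (not in the original example): once the matrix is assembled,
     the attached null space could be sanity-checked with MatNullSpaceTest():

       PetscBool isNull;
       PetscCall(MatNullSpaceTest(constants, J, &isNull));
       PetscCheck(isNull, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "constants is not a null space of J");
  */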
  PetscCall(KSPSetOperators(ksp, J, J));
  PetscCall(KSPSetFromOptions(ksp));
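
  /*
     Added note: KSPSetFromOptions() is where runtime options such as
     -ksp_pc_side left|right, -pc_type and -ksp_rtol (used by the tests at the
     bottom of this file) take effect.
  */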
  PetscCall(KSPSolve(ksp, r, x));
  PetscCall(KSPSolveTranspose(ksp, r, x));
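
  /*
     Added sketch (not in the original example): one could inspect why each
     solve stopped, for instance

       KSPConvergedReason reason;
       PetscInt           its;
       PetscCall(KSPGetConvergedReason(ksp, &reason));
       PetscCall(KSPGetIterationNumber(ksp, &its));
       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s after %" PetscInt_FMT " iterations\n", KSPConvergedReasons[reason], its));
  */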
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&r));
  PetscCall(MatDestroy(&J));
  PetscCall(MatNullSpaceDestroy(&constants));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
/*
   This intentionally includes something in the right-hand side that is not in the range of the matrix A.
   Since MatSetNullSpace() is called and the matrix is symmetric, the Krylov method automatically removes this
   portion of the right-hand side before solving the linear system.
*/
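/*
   Added note: for symmetric A, range(A) = nullspace(A)^perp, so writing
   b = b_range + b_null and discarding b_null (the projection of b onto the
   constants) leaves a consistent system A x = b_range. Here b_null comes from
   the "+ 1.0" term added below.
*/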
PetscErrorCode FormRightHandSide(Vec f, void *ctx)
{
  DM           da = (DM)ctx;
  PetscScalar *ff;
  PetscInt     i, M, xs, xm;
  PetscReal    h;

  PetscFunctionBeginUser;
  PetscCall(DMDAVecGetArray(da, f, &ff));

  /*
     Get local grid boundaries (for 1-dimensional DMDA):
       xs, xm - starting grid index, width of local grid (no ghost points)
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAGetInfo(da, NULL, &M, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  /*
     Compute the right-hand side over the locally owned part of the grid
     (this loop accesses no neighboring grid points, so no ghost values are needed)
  */
  h = 1.0 / M;
  for (i = xs; i < xs + xm; i++) ff[i] = -PetscSinReal(2.0 * PETSC_PI * i * h) + 1.0;
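
  /*
     Added note: the smooth periodic part -sin(2*pi*x_i) sums to zero over the
     grid and hence, in the symmetric case, lies in range(A); the "+ 1.0" term
     is a constant, i.e. it lies in nullspace(A), making the system inconsistent.
  */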
  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, f, &ff));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ------------------------------------------------------------------- */
PetscErrorCode FormMatrix(Mat jac, void *ctx)
{
  PetscScalar A[3];
  PetscInt    i, M, xs, xm;
  DM          da = (DM)ctx;
  MatStencil  row, cols[3];
  PetscReal   h;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  /*
     Get the global grid size M; DMDAGetCorners() above already supplied the
     locally owned row range
  */
  PetscCall(DMDAGetInfo(da, NULL, &M, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(MatZeroEntries(jac));
  h = 1.0 / M;
  /* because of the periodic boundary conditions we can simply loop over all local rows and access the columns
     to the left and right; each row of the second-difference stencil sums to zero, so the constant vector lies
     in the null space */
  if (symmetric) {
    for (i = xs; i < xs + xm; i++) {
      row.i     = i;
      cols[0].i = i - 1;
      cols[1].i = i;
      cols[2].i = i + 1;
      A[0] = A[2] = 1.0 / (h * h);
      A[1]        = -2.0 / (h * h);
      PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
    }
  } else {
    MatStencil  *acols;
    PetscScalar *avals;

    /* this non-symmetric branch only works on a single MPI process */
    PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));
    /* row 0 is replaced by a dense row of alternating -1/+1 entries */
    row.i = 0;
    PetscCall(PetscMalloc1(M, &acols));
    PetscCall(PetscMalloc1(M, &avals));
    for (i = 0; i < M; i++) {
      acols[i].i = i;
      avals[i]   = (i % 2) ? 1 : -1;
    }
    PetscCall(MatSetValuesStencil(jac, 1, &row, M, acols, avals, ADD_VALUES));
    PetscCall(PetscFree(acols));
    PetscCall(PetscFree(avals));
    row.i     = 1;
    cols[0].i = -1; /* wraps periodically to M - 1 */
    cols[1].i = 1;
    cols[2].i = 1 + 1;
    A[0] = A[2] = 1.0 / (h * h);
    A[1]        = -2.0 / (h * h);
    PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
    for (i = 2; i < xs + xm - 1; i++) {
      row.i     = i;
      cols[0].i = i - 1;
      cols[1].i = i;
      cols[2].i = i + 1;
      A[0] = A[2] = 1.0 / (h * h);
      A[1]        = -2.0 / (h * h);
      PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
    }
    row.i     = M - 1;
    cols[0].i = M - 2;
    cols[1].i = M - 1;
    cols[2].i = M + 1; /* wraps periodically to 1 */
    A[0] = A[2] = 1.0 / (h * h);
    A[1]        = -2.0 / (h * h);
    PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
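
/*
   Added sketch (not in the original example): the transpose null space attached
   in main() can be verified directly; for the vector n built there, A' n should
   vanish:

     Vec       y;
     PetscReal norm;
     PetscCall(MatCreateVecs(J, NULL, &y));
     PetscCall(MatMultTranspose(J, n, y));
     PetscCall(VecNorm(y, NORM_2, &norm)); (norm should be ~0)
     PetscCall(VecDestroy(&y));
*/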
/*TEST

   test:
     suffix: nonsymmetric_left
     args: -symmetric false -ksp_view -ksp_converged_reason -pc_type jacobi -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side left
     filter: sed 's/ATOL/RTOL/g'
     requires: !single

   test:
     suffix: nonsymmetric_right
     args: -symmetric false -ksp_view -ksp_converged_reason -pc_type jacobi -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side right
     filter: sed 's/ATOL/RTOL/g'
     requires: !single

   test:
     suffix: symmetric_left
     args: -ksp_view -ksp_converged_reason -pc_type sor -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side left
     requires: !single

   test:
     suffix: symmetric_right
     args: -ksp_view -ksp_converged_reason -pc_type sor -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side right
     filter: sed 's/ATOL/RTOL/g'
     requires: !single

   test:
     suffix: transpose_asm
     args: -symmetric false -ksp_monitor -ksp_view -pc_type asm -sub_pc_type lu -sub_pc_factor_zeropivot 1.e-33 -ksp_converged_reason
     filter: sed 's/ATOL/RTOL/g'

TEST*/