Actual source code: ex13.c
static char help[] = "Solves a variable Poisson problem with KSP.\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
/*
  User-defined context that contains all the data structures used
  in the linear solution process.
*/
typedef struct {
  Vec         x, b;     /* solution vector, right-hand-side vector */
  Mat         A;        /* sparse matrix */
  KSP         ksp;      /* linear solver context */
  PetscInt    m, n;     /* grid dimensions */
  PetscScalar hx2, hy2; /* 1/(hx*hx) = (m+1)^2 and 1/(hy*hy) = (n+1)^2 */
} UserCtx;
extern PetscErrorCode UserInitializeLinearSolver(PetscInt, PetscInt, UserCtx *);
extern PetscErrorCode UserFinalizeLinearSolver(UserCtx *);
extern PetscErrorCode UserDoLinearSolver(PetscScalar *, UserCtx *userctx, PetscScalar *b, PetscScalar *x);
int main(int argc, char **args)
{
  UserCtx      userctx;
  PetscInt     m = 6, n = 7, t, tmax = 2, i, Ii, j, N;
  PetscScalar *userx, *rho, *solution, *userb, hx, hy, x, y;
  PetscReal    enorm;

  /*
    Initialize the PETSc libraries
  */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  /*
    The next two lines are for testing only; these allow the user to
    decide the grid size at runtime.
  */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
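  /*
    For example, running the executable (here assumed to be named ex13) as
        ./ex13 -m 19 -n 20
    solves the problem on a 19 x 20 interior grid, as in the test at the
    bottom of this file.
  */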
  /*
    Create the empty sparse matrix and linear solver data structures
  */
  PetscCall(UserInitializeLinearSolver(m, n, &userctx));
  N = m * n;

  /*
    Allocate arrays to hold the solution to the linear system.  This is not
    normally done in PETSc programs, but in this case, since we are calling
    these routines from a non-PETSc program, we would like to reuse the data
    structures from another code. So in the context of a larger application
    these would be provided by other (non-PETSc) parts of the application
    code.
  */
  PetscCall(PetscMalloc1(N, &userx));
  PetscCall(PetscMalloc1(N, &userb));
  PetscCall(PetscMalloc1(N, &solution));
  /*
    Allocate an array to hold the coefficients in the elliptic operator
  */
  PetscCall(PetscMalloc1(N, &rho));

  /*
    Fill up the array rho[] with the function rho(x,y) = x; fill the
    right-hand-side b[] and the solution with a known problem for testing.
  */
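  /*
    Sanity check of the manufactured problem: with rho(x,y) = x and the exact
    solution psi(x,y) = sin(2*pi*x)*sin(2*pi*y), the forcing term is
        f = -div(rho grad psi) = -psi_x - x*(psi_xx + psi_yy)
          = -2*pi*cos(2*pi*x)*sin(2*pi*y) + 8*pi^2*x*sin(2*pi*x)*sin(2*pi*y),
    which is exactly the expression assigned to userb[] below.
  */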
  hx = 1.0 / (m + 1);
  hy = 1.0 / (n + 1);
  y  = hy;
  Ii = 0;
  for (j = 0; j < n; j++) {
    x = hx;
    for (i = 0; i < m; i++) {
      rho[Ii]      = x;
      solution[Ii] = PetscSinScalar(2. * PETSC_PI * x) * PetscSinScalar(2. * PETSC_PI * y);
      userb[Ii]    = -2 * PETSC_PI * PetscCosScalar(2 * PETSC_PI * x) * PetscSinScalar(2 * PETSC_PI * y) + 8 * PETSC_PI * PETSC_PI * x * PetscSinScalar(2 * PETSC_PI * x) * PetscSinScalar(2 * PETSC_PI * y);
      x += hx;
      Ii++;
    }
    y += hy;
  }
  /*
    Loop over a number of timesteps, setting up and solving the linear
    system at each timestep.

    Note this is somewhat artificial. It is intended to demonstrate how
    one may reuse the linear solver objects at each timestep.
  */
  for (t = 0; t < tmax; t++) {
    PetscCall(UserDoLinearSolver(rho, &userctx, userb, userx));

    /*
      Compute error: Note that this could (and usually should) all be done
      using the PETSc vector operations. Here we demonstrate using more
      standard programming practices to show how they may be mixed with
      PETSc.
    */
    enorm = 0.0;
    for (i = 0; i < N; i++) enorm += PetscRealPart(PetscConj(solution[i] - userx[i]) * (solution[i] - userx[i]));
    enorm *= PetscRealPart(hx * hy);
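    /*
      Note that enorm as accumulated above is the *square* of the discrete
      L2 error norm, sum_i |e_i|^2 * hx * hy; no square root is taken
      before printing.
    */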
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "m %" PetscInt_FMT " n %" PetscInt_FMT " error norm %g\n", m, n, (double)enorm));
  }

  /*
    We are all finished solving linear systems, so we clean up the
    data structures.
  */
  PetscCall(PetscFree(rho));
  PetscCall(PetscFree(solution));
  PetscCall(PetscFree(userx));
  PetscCall(PetscFree(userb));
  PetscCall(UserFinalizeLinearSolver(&userctx));
  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------------*/
PetscErrorCode UserInitializeLinearSolver(PetscInt m, PetscInt n, UserCtx *userctx)
{
  PetscInt N;

  PetscFunctionBeginUser;
  /*
    Here we assume use of a grid of size m x n, with all points on the
    interior of the domain, i.e., we do not include the points corresponding
    to homogeneous Dirichlet boundary conditions. We assume that the domain
    is [0,1] x [0,1].
  */
  userctx->m   = m;
  userctx->n   = n;
  userctx->hx2 = (m + 1) * (m + 1);
  userctx->hy2 = (n + 1) * (n + 1);
  N            = m * n;

  /*
    Create the sparse matrix. Preallocate 5 nonzeros per row.
  */
  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, N, N, 5, 0, &userctx->A));
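  /*
    Five nonzeros per row suffice because the discretization in
    UserDoLinearSolver() uses a standard 5-point stencil: each row couples
    a grid point to itself and to its west, east, south, and north
    neighbors.
  */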
  /*
    Create vectors. Here we create vectors with no memory allocated.
    This way, we can use the data structures already in the program
    by attaching them with the VecPlaceArray() routine at a later stage.
  */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, N, NULL, &userctx->b));
  PetscCall(VecDuplicate(userctx->b, &userctx->x));
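  /*
    The pattern is: create the vectors once with a NULL array here, attach
    the user's storage with VecPlaceArray() just before each solve, and
    detach it with VecResetArray() afterwards; see UserDoLinearSolver()
    below.
  */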
  /*
    Create linear solver context. This will be used repeatedly for all
    the linear solves needed.
  */
  PetscCall(KSPCreate(PETSC_COMM_SELF, &userctx->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  Solves -div (rho grad psi) = F using finite differences.
  rho is a two-dimensional array of size m by n, stored in Fortran
  style by columns. userb is a standard one-dimensional array.
*/
/* ------------------------------------------------------------------------*/
PetscErrorCode UserDoLinearSolver(PetscScalar *rho, UserCtx *userctx, PetscScalar *userb, PetscScalar *userx)
{
  PetscInt    i, j, Ii, J, m = userctx->m, n = userctx->n;
  Mat         A = userctx->A;
  PC          pc;
  PetscScalar v, hx2 = userctx->hx2, hy2 = userctx->hy2;

  PetscFunctionBeginUser;
  /*
    This is not the most efficient way of generating the matrix,
    but let's not worry about it. We should have separate code for
    the four corners, each edge, and then the interior, so that the
    slow if-tests would not appear inside the loop.

    Computes the operator
        -div rho grad
    on an m by n grid with zero Dirichlet boundary conditions. rho is
    assumed to be given on the same grid on which the finite difference
    stencil is applied. For a staggered grid, one would have to change
    things slightly.
  */
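  /*
    Restating the code below: with natural (row-by-row) ordering
    Ii = i + j*m, the 5-point stencil entries for row Ii are
        west (J = Ii-1), east (J = Ii+1):   -0.5 * (rho[Ii] + rho[J]) * hx2
        south (J = Ii-m), north (J = Ii+m): -0.5 * (rho[Ii] + rho[J]) * hy2
        diagonal:                            2.0 * rho[Ii] * (hx2 + hy2)
    where neighbors falling outside the grid are simply omitted
    (homogeneous Dirichlet boundary conditions).
  */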
  Ii = 0;
  for (j = 0; j < n; j++) {
    for (i = 0; i < m; i++) {
      if (j > 0) {
        J = Ii - m;
        v = -.5 * (rho[Ii] + rho[J]) * hy2;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (j < n - 1) {
        J = Ii + m;
        v = -.5 * (rho[Ii] + rho[J]) * hy2;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (i > 0) {
        J = Ii - 1;
        v = -.5 * (rho[Ii] + rho[J]) * hx2;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (i < m - 1) {
        J = Ii + 1;
        v = -.5 * (rho[Ii] + rho[J]) * hx2;
        PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      v = 2.0 * rho[Ii] * (hx2 + hy2);
      PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
      Ii++;
    }
  }
  /*
    Assemble matrix
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
    Set operators. Here the matrix that defines the linear system
    also serves as the matrix from which the preconditioner is
    constructed. Since the matrix keeps the same nonzero pattern across
    timesteps, the linear solver can reuse its setup work between solves.
  */
  PetscCall(KSPSetOperators(userctx->ksp, A, A));
  /*
    Set linear solver defaults for this problem (optional).
    - Here we set it to use direct LU factorization for the solution
  */
  PetscCall(KSPGetPC(userctx->ksp, &pc));
  PetscCall(PCSetType(pc, PCLU));

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.

    Run the program with the option -help to see all the possible
    linear solver options.
  */
  PetscCall(KSPSetFromOptions(userctx->ksp));
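  /*
    For instance, running with
        -ksp_type gmres -pc_type ilu -ksp_monitor
    would replace the LU default set above with ILU-preconditioned GMRES
    and print the residual norm at each iteration.
  */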
  /*
    This allows the PETSc linear solvers to compute the solution
    directly in the user's array rather than in the PETSc vector.

    This is essentially a hack and is not highly recommended unless you
    are quite comfortable with using PETSc. In general, users should
    write their entire application using PETSc vectors rather than
    arrays.
  */
  PetscCall(VecPlaceArray(userctx->x, userx));
  PetscCall(VecPlaceArray(userctx->b, userb));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(userctx->ksp, userctx->b, userctx->x));
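  /*
    If desired, convergence information could be queried at this point,
    for example with KSPGetIterationNumber() or KSPGetConvergedReason().
  */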
  /*
    Put back the PETSc arrays that belong in the vectors userctx->x
    and userctx->b
  */
  PetscCall(VecResetArray(userctx->x));
  PetscCall(VecResetArray(userctx->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ------------------------------------------------------------------------*/
PetscErrorCode UserFinalizeLinearSolver(UserCtx *userctx)
{
  /*
    We are all done and don't need to solve any more linear systems, so
    we free the work space. All PETSc objects should be destroyed when
    they are no longer needed.
  */
  PetscFunctionBeginUser;
  PetscCall(KSPDestroy(&userctx->ksp));
  PetscCall(VecDestroy(&userctx->x));
  PetscCall(VecDestroy(&userctx->b));
  PetscCall(MatDestroy(&userctx->A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*TEST

   test:
      args: -m 19 -n 20 -ksp_gmres_cgs_refinement_type refine_always

TEST*/