/* ex34.c */
2: /*
3: Laplacian in 3D. Modeled by the partial differential equation
5: div grad u = f, 0 < x,y,z < 1,
7: with pure Neumann boundary conditions
   du/dn = 0 for x = 0, x = 1, y = 0, y = 1, z = 0, z = 1.
11: The functions are cell-centered
13: This uses multigrid to solve the linear system
15: Contributed by Jianming Yang <jianming-yang@uiowa.edu>
16: */
18: static char help[] = "Solves 3D Laplacian using multigrid.\n\n";
20: #include <petscdm.h>
21: #include <petscdmda.h>
22: #include <petscksp.h>
24: extern PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
25: extern PetscErrorCode ComputeRHS(KSP, Vec, void *);
/*
   main - Solves the 3D cell-centered Laplacian with multigrid, then reports
   the true residual norm and the error against the known solution
   u(x,y,z) = cos(2*pi*x)*cos(2*pi*y)*cos(2*pi*z).
*/
int main(int argc, char **argv)
{
  KSP         ksp;
  DM          da;
  PetscReal   norm;       /* holds residual and error norms in turn */
  PetscInt    i, j, k, mx, my, mz, xm, ym, zm, xs, ys, zs, d, dof;
  PetscScalar Hx, Hy, Hz; /* cell sizes in each direction */
  PetscScalar ****array;  /* solution array, indexed [k][j][i][dof] */
  Vec         x, b, r;
  Mat         J;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  dof = 1;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-da_dof", &dof, NULL));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  /* 12x12x12 base grid, star stencil of width 1, dof components per cell */
  PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 12, 12, 12, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, 1, 0, 0, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  /* piecewise-constant (Q0) interpolation matches the cell-centered layout */
  PetscCall(DMDASetInterpolationType(da, DMDA_Q0));

  PetscCall(KSPSetDM(ksp, da));

  /* KSP uses these callbacks to build the rhs and the operator on each level */
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &b));
  PetscCall(KSPGetOperators(ksp, NULL, &J));
  PetscCall(VecDuplicate(b, &r));

  /* r = J*x - b: compute and print the true residual norm */
  PetscCall(MatMult(J, x, r));
  PetscCall(VecAXPY(r, -1.0, b));
  PetscCall(VecNorm(r, NORM_2, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));

  PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx = 1.0 / (PetscReal)(mx);
  Hy = 1.0 / (PetscReal)(my);
  Hz = 1.0 / (PetscReal)(mz);
  PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
  PetscCall(DMDAVecGetArrayDOF(da, x, &array));
  /* subtract the solution evaluated at cell centers ((i+0.5)*Hx, ...),
     leaving the pointwise error in x */
  for (k = zs; k < zs + zm; k++) {
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        for (d = 0; d < dof; d++) array[k][j][i][d] -= PetscCosScalar(2 * PETSC_PI * (((PetscReal)i + 0.5) * Hx)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)j + 0.5) * Hy)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)k + 0.5) * Hz));
      }
    }
  }
  PetscCall(DMDAVecRestoreArrayDOF(da, x, &array));
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  /* error in the max norm, then 1- and 2-norms scaled by the cell count */
  PetscCall(VecNorm(x, NORM_INFINITY, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
  PetscCall(VecNorm(x, NORM_1, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)(norm / ((PetscReal)(mx) * (PetscReal)(my) * (PetscReal)(mz)))));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)(norm / ((PetscReal)(mx) * (PetscReal)(my) * (PetscReal)(mz)))));

  PetscCall(VecDestroy(&r));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
96: PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
97: {
98: PetscInt d, dof, i, j, k, mx, my, mz, xm, ym, zm, xs, ys, zs;
99: PetscScalar Hx, Hy, Hz;
100: PetscScalar ****array;
101: DM da;
102: MatNullSpace nullspace;
104: PetscFunctionBeginUser;
105: PetscCall(KSPGetDM(ksp, &da));
106: PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, &dof, 0, 0, 0, 0, 0));
107: Hx = 1.0 / (PetscReal)(mx);
108: Hy = 1.0 / (PetscReal)(my);
109: Hz = 1.0 / (PetscReal)(mz);
110: PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
111: PetscCall(DMDAVecGetArrayDOFWrite(da, b, &array));
112: for (k = zs; k < zs + zm; k++) {
113: for (j = ys; j < ys + ym; j++) {
114: for (i = xs; i < xs + xm; i++) {
115: for (d = 0; d < dof; d++) {
116: array[k][j][i][d] = 12 * PETSC_PI * PETSC_PI * PetscCosScalar(2 * PETSC_PI * (((PetscReal)i + 0.5) * Hx)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)j + 0.5) * Hy)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)k + 0.5) * Hz)) * Hx * Hy * Hz;
117: }
118: }
119: }
120: }
121: PetscCall(DMDAVecRestoreArrayDOFWrite(da, b, &array));
122: PetscCall(VecAssemblyBegin(b));
123: PetscCall(VecAssemblyEnd(b));
125: /* force right hand side to be consistent for singular matrix */
126: /* note this is really a hack, normally the model would provide you with a consistent right handside */
128: PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
129: PetscCall(MatNullSpaceRemove(nullspace, b));
130: PetscCall(MatNullSpaceDestroy(&nullspace));
131: PetscFunctionReturn(PETSC_SUCCESS);
132: }
/*
   ComputeMatrix - Callback used by KSP to assemble the 7-point finite-volume
   Laplacian on the DMDA grid; each DOF component gets its own decoupled copy
   of the stencil.

   Interior cells get the full star stencil. Cells on a domain face drop the
   missing neighbor entries and shrink the diagonal to the sum of the retained
   off-diagonal magnitudes, which imposes homogeneous Neumann conditions.
   The resulting operator is singular with the constant null space, which is
   attached to J so solvers can handle it.
*/
PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  PetscInt    dof, i, j, k, d, mx, my, mz, xm, ym, zm, xs, ys, zs, num, numi, numj, numk;
  PetscScalar v[7], Hx, Hy, Hz, HyHzdHx, HxHzdHy, HxHydHz;
  MatStencil  row, col[7];
  DM          da;
  MatNullSpace nullspace;
  PetscBool   dump_mat = PETSC_FALSE, check_matis = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, &dof, 0, 0, 0, 0, 0));
  Hx = 1.0 / (PetscReal)(mx);
  Hy = 1.0 / (PetscReal)(my);
  Hz = 1.0 / (PetscReal)(mz);
  /* finite-volume face coefficients: face area divided by cell spacing */
  HyHzdHx = Hy * Hz / Hx;
  HxHzdHy = Hx * Hz / Hy;
  HxHydHz = Hx * Hy / Hz;
  PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
  for (k = zs; k < zs + zm; k++) {
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        for (d = 0; d < dof; d++) {
          row.i = i;
          row.j = j;
          row.k = k;
          row.c = d;
          if (i == 0 || j == 0 || k == 0 || i == mx - 1 || j == my - 1 || k == mz - 1) {
            /* boundary cell: keep only the neighbors that exist; numi/numj/numk
               count retained neighbors per direction to build the diagonal */
            num  = 0;
            numi = 0;
            numj = 0;
            numk = 0;
            if (k != 0) {
              v[num]     = -HxHydHz;
              col[num].i = i;
              col[num].j = j;
              col[num].k = k - 1;
              col[num].c = d;
              num++;
              numk++;
            }
            if (j != 0) {
              v[num]     = -HxHzdHy;
              col[num].i = i;
              col[num].j = j - 1;
              col[num].k = k;
              col[num].c = d;
              num++;
              numj++;
            }
            if (i != 0) {
              v[num]     = -HyHzdHx;
              col[num].i = i - 1;
              col[num].j = j;
              col[num].k = k;
              col[num].c = d;
              num++;
              numi++;
            }
            if (i != mx - 1) {
              v[num]     = -HyHzdHx;
              col[num].i = i + 1;
              col[num].j = j;
              col[num].k = k;
              col[num].c = d;
              num++;
              numi++;
            }
            if (j != my - 1) {
              v[num]     = -HxHzdHy;
              col[num].i = i;
              col[num].j = j + 1;
              col[num].k = k;
              col[num].c = d;
              num++;
              numj++;
            }
            if (k != mz - 1) {
              v[num]     = -HxHydHz;
              col[num].i = i;
              col[num].j = j;
              col[num].k = k + 1;
              col[num].c = d;
              num++;
              numk++;
            }
            /* diagonal = sum of retained neighbor coefficients (row sum zero) */
            v[num]     = (PetscReal)(numk)*HxHydHz + (PetscReal)(numj)*HxHzdHy + (PetscReal)(numi)*HyHzdHx;
            col[num].i = i;
            col[num].j = j;
            col[num].k = k;
            col[num].c = d;
            num++;
            PetscCall(MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES));
          } else {
            /* interior cell: full 7-point star stencil */
            v[0]     = -HxHydHz;
            col[0].i = i;
            col[0].j = j;
            col[0].k = k - 1;
            col[0].c = d;
            v[1]     = -HxHzdHy;
            col[1].i = i;
            col[1].j = j - 1;
            col[1].k = k;
            col[1].c = d;
            v[2]     = -HyHzdHx;
            col[2].i = i - 1;
            col[2].j = j;
            col[2].k = k;
            col[2].c = d;
            v[3]     = 2.0 * (HyHzdHx + HxHzdHy + HxHydHz);
            col[3].i = i;
            col[3].j = j;
            col[3].k = k;
            col[3].c = d;
            v[4]     = -HyHzdHx;
            col[4].i = i + 1;
            col[4].j = j;
            col[4].k = k;
            col[4].c = d;
            v[5]     = -HxHzdHy;
            col[5].i = i;
            col[5].j = j + 1;
            col[5].k = k;
            col[5].c = d;
            v[6]     = -HxHydHz;
            col[6].i = i;
            col[6].j = j;
            col[6].k = k + 1;
            col[6].c = d;
            PetscCall(MatSetValuesStencil(jac, 1, &row, 7, col, v, INSERT_VALUES));
          }
        }
      }
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  /* optional debugging: dump the assembled operator in MATLAB format */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-dump_mat", &dump_mat, NULL));
  if (dump_mat) {
    Mat JJ;

    PetscCall(MatComputeOperator(jac, MATAIJ, &JJ));
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_MATLAB));
    PetscCall(MatView(JJ, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(MatDestroy(&JJ));
  }
  PetscCall(MatViewFromOptions(jac, NULL, "-view_mat"));
  /* optional check: round-trip through MATIS and report the Frobenius-norm
     difference against the directly assembled matrix (should be ~0) */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_matis", &check_matis, NULL));
  if (check_matis) {
    void (*f)(void);
    Mat       J2;
    MatType   jtype;
    PetscReal nrm;

    PetscCall(MatGetType(jac, &jtype));
    PetscCall(MatConvert(jac, MATIS, MAT_INITIAL_MATRIX, &J2));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv"));
    PetscCall(MatConvert(J2, jtype, MAT_INPLACE_MATRIX, &J2));
    /* reuse jac's viewer so -view_conv_assembled output matches jac's format */
    PetscCall(MatGetOperation(jac, MATOP_VIEW, &f));
    PetscCall(MatSetOperation(J2, MATOP_VIEW, f));
    PetscCall(MatSetDM(J2, da));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_assembled"));
    PetscCall(MatAXPY(J2, -1., jac, DIFFERENT_NONZERO_PATTERN));
    PetscCall(MatNorm(J2, NORM_FROBENIUS, &nrm));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error MATIS %g\n", (double)nrm));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_err"));
    PetscCall(MatDestroy(&J2));
  }
  /* the constant vector is in the null space of the pure-Neumann operator */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
  PetscCall(MatSetNullSpace(J, nullspace));
  PetscCall(MatNullSpaceDestroy(&nullspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}
308: /*TEST
310: build:
311: requires: !complex !single
313: test:
314: args: -pc_type mg -pc_mg_type full -ksp_type fgmres -ksp_monitor_short -pc_mg_levels 3 -mg_coarse_pc_factor_shift_type nonzero -ksp_view
316: test:
317: suffix: 2
318: nsize: 2
319: args: -ksp_monitor_short -da_grid_x 50 -da_grid_y 50 -pc_type ksp -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_ksp_rtol 1e-1 -ksp_ksp_monitor -ksp_type pipefgmres -ksp_gmres_restart 5
321: test:
322: suffix: hyprestruct
323: nsize: 3
324: requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
325: args: -ksp_type gmres -pc_type pfmg -dm_mat_type hyprestruct -ksp_monitor -da_refine 3
327: TEST*/