Actual source code: ex65.c
/*
   Partial differential equation

      d  du
      -- -- = 1,  0 < x < 1,
      dx dx

   with boundary conditions

      u = 0 for x = 0, x = 1

   This uses multigrid to solve the linear system.

   Demonstrates how to build a DMSHELL for managing multigrid. The DMSHELL simply wraps a
   1d DMDA, which constructs all the needed PETSc objects.
*/

static char help[] = "Solves 1D constant coefficient Laplacian using DMSHELL and multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

static PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS(KSP, Vec, void *);
static PetscErrorCode CreateMatrix(DM, Mat *);
static PetscErrorCode CreateGlobalVector(DM, Vec *);
static PetscErrorCode CreateLocalVector(DM, Vec *);
static PetscErrorCode Refine(DM, MPI_Comm, DM *);
static PetscErrorCode Coarsen(DM, MPI_Comm, DM *);
static PetscErrorCode CreateInterpolation(DM, DM, Mat *, Vec *);
static PetscErrorCode CreateRestriction(DM, DM, Mat *);
static PetscErrorCode Destroy(void *);

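/*
   Builds a DMSHELL that wraps the given DMDA: the DMDA is stashed as the shell's
   context, and every hook registered below simply unwraps the context and forwards
   the work to the corresponding DMDA operation.
*/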
static PetscErrorCode MyDMShellCreate(MPI_Comm comm, DM da, DM *shell)
{
  PetscFunctionBeginUser;
  PetscCall(DMShellCreate(comm, shell));
  PetscCall(DMShellSetContext(*shell, da));
  PetscCall(DMShellSetCreateMatrix(*shell, CreateMatrix));
  PetscCall(DMShellSetCreateGlobalVector(*shell, CreateGlobalVector));
  PetscCall(DMShellSetCreateLocalVector(*shell, CreateLocalVector));
  PetscCall(DMShellSetRefine(*shell, Refine));
  PetscCall(DMShellSetCoarsen(*shell, Coarsen));
  PetscCall(DMShellSetCreateInterpolation(*shell, CreateInterpolation));
  PetscCall(DMShellSetCreateRestriction(*shell, CreateRestriction));
  PetscCall(DMShellSetDestroyContext(*shell, Destroy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

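/*
   Because KSPSetDM() below receives the shell rather than the DMDA, PCMG obtains
   all coarse grids, matrices, vectors, and interpolations through the hooks
   registered in MyDMShellCreate().
*/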
int main(int argc, char **argv)
{
  KSP      ksp;
  DM       da, shell;
  PetscInt levels;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 129, 1, 1, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(MyDMShellCreate(PETSC_COMM_WORLD, da, &shell));
  /* these two lines are not needed, but they allow PCMG to automatically know how many multigrid levels the user wants */
  PetscCall(DMGetRefineLevel(da, &levels));
  PetscCall(DMSetRefineLevel(shell, levels));

  PetscCall(KSPSetDM(ksp, shell));
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&shell));
  PetscCall(PetscFinalize());
  return 0;
}

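/*
   Destroy-context hook: the context is the DMDA stored by MyDMShellCreate(), so
   release it when the shell goes away. Casting &ctx to DM * works because DM is
   itself a pointer type.
*/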
static PetscErrorCode Destroy(void *ctx)
{
  PetscFunctionBeginUser;
  PetscCall(DMDestroy((DM *)&ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

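/* Matrix hook: let the wrapped DMDA create (and preallocate) the operator */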
static PetscErrorCode CreateMatrix(DM shell, Mat *A)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCreateMatrix(da, A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

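/*
   Interpolation hook: unwrap both shells and let the two DMDAs build the standard
   coarse-to-fine interpolation (plus the optional scaling vector).
*/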
static PetscErrorCode CreateInterpolation(DM dm1, DM dm2, Mat *mat, Vec *vec)
{
  DM da1, da2;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm1, &da1));
  PetscCall(DMShellGetContext(dm2, &da2));
  PetscCall(DMCreateInterpolation(da1, da2, mat, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

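/*
   Restriction hook: the DMDA pair provides only interpolation, so restriction is
   taken as its transpose, the usual choice in multigrid.
*/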
static PetscErrorCode CreateRestriction(DM dm1, DM dm2, Mat *mat)
{
  DM  da1, da2;
  Mat tmat;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm1, &da1));
  PetscCall(DMShellGetContext(dm2, &da2));
  PetscCall(DMCreateInterpolation(da1, da2, &tmat, NULL));
  PetscCall(MatTranspose(tmat, MAT_INITIAL_MATRIX, mat));
  PetscCall(MatDestroy(&tmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

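/*
   Vector hooks: create the vector from the wrapped DMDA, then re-tag it with
   VecSetDM() so that KSP and PCMG see the shell, not the DMDA, as its DM.
*/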
static PetscErrorCode CreateGlobalVector(DM shell, Vec *x)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCreateGlobalVector(da, x));
  PetscCall(VecSetDM(*x, shell));
  PetscFunctionReturn(PETSC_SUCCESS);
}

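/* Same pattern as CreateGlobalVector(), but for local (ghosted) vectors */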
static PetscErrorCode CreateLocalVector(DM shell, Vec *x)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCreateLocalVector(da, x));
  PetscCall(VecSetDM(*x, shell));
  PetscFunctionReturn(PETSC_SUCCESS);
}

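/*
   Refine/Coarsen hooks: refine or coarsen the wrapped DMDA and wrap the result in
   a fresh DMSHELL, so every level of the multigrid hierarchy is again a shell.
*/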
static PetscErrorCode Refine(DM shell, MPI_Comm comm, DM *dmnew)
{
  DM da, dafine;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMRefine(da, comm, &dafine));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell), dafine, dmnew));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode Coarsen(DM shell, MPI_Comm comm, DM *dmnew)
{
  DM da, dacoarse;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMCoarsen(da, comm, &dacoarse));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell), dacoarse, dmnew));
  PetscFunctionReturn(PETSC_SUCCESS);
}

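/*
   Right-hand side: VecSet(b, h) loads the constant forcing scaled by the mesh width
   h = 1/(mx-1); the first and last entries are then reset to zero to match the
   Dirichlet conditions u = 0 at x = 0 and x = 1.
*/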
static PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  PetscInt    mx, idx[2];
  PetscScalar h, v[2];
  DM          da, shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &shell));
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  h = 1.0 / (mx - 1);
  PetscCall(VecSet(b, h));
  idx[0] = 0;
  idx[1] = mx - 1;
  v[0] = v[1] = 0.0;
  PetscCall(VecSetValues(b, 2, idx, v, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscFunctionReturn(PETSC_SUCCESS);
}

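/*
   Operator: the standard 3-point stencil (-1, 2, -1)/h for the 1D Laplacian, scaled
   consistently with the right-hand side above. Boundary rows keep only a diagonal
   entry, which together with the zeroed right-hand side pins u = 0 there.
*/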
static PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  PetscInt    i, mx, xm, xs;
  PetscScalar v[3], h;
  MatStencil  row, col[3];
  DM          da, shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &shell));
  PetscCall(DMShellGetContext(shell, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  PetscCall(DMDAGetCorners(da, &xs, 0, 0, &xm, 0, 0));
  h = 1.0 / (mx - 1);

  for (i = xs; i < xs + xm; i++) {
    row.i = i;
    if (i == 0 || i == mx - 1) {
      v[0] = 2.0 / h;
      PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
    } else {
      v[0]     = -1.0 / h;
      col[0].i = i - 1;
      v[1]     = 2.0 / h;
      col[1].i = row.i;
      v[2]     = -1.0 / h;
      col[2].i = i + 1;
      PetscCall(MatSetValuesStencil(jac, 1, &row, 3, col, v, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

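/*
   A typical run, matching the test below (assuming the compiled executable is named
   ex65; -da_refine controls the number of DMDA refinements and hence how many
   levels PCMG builds):

      mpiexec -n 4 ./ex65 -ksp_monitor -pc_type mg -da_refine 3
*/
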
/*TEST

   test:
      nsize: 4
      args: -ksp_monitor -pc_type mg -da_refine 3

TEST*/