Actual source code: ex19.c


static char help[] = "Solves the Laplacian with multigrid, the bad way.\n\
  -Mx <xg>, where <xg> = number of grid points in the x-direction on the coarse grid\n\
  -My <yg>, where <yg> = number of grid points in the y-direction on the coarse grid\n\
  -ratio <r>, where <r> = ratio of grid lines between the fine and coarse grids\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";

/*
    This problem is modeled by the partial differential equation

            -Laplacian u = g,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a linear
    system of equations.
*/
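
/*
   For reference, this is the 5-point stencil that FormJacobian_Grid() below
   assembles at an interior point (i,j), with spacings hx and hy; each equation
   is scaled by hx*hy rather than divided by hx^2 and hy^2, which rescales the
   system but not its solution:

      2*(hy/hx + hx/hy)*u(i,j) - (hy/hx)*(u(i-1,j) + u(i+1,j))
                               - (hx/hy)*(u(i,j-1) + u(i,j+1)) = hx*hy*g(i,j)
*/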

#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts */

typedef struct {
  PetscInt mx, my;         /* number of grid points in the x and y directions */
  Vec      localX, localF; /* local vectors with ghost region */
  DM       da;
  Vec      x, b, r; /* global vectors */
  Mat      J;       /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  KSP      ksp_coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1

extern PetscErrorCode FormJacobian_Grid(AppCtx *, GridCtx *, Mat *);

/*
      ratio - ratio of grid lines between the fine and coarse grids.
*/
int main(int argc, char **argv)
{
  AppCtx      user;
  PetscInt    its, N, n, Nx = PETSC_DECIDE, Ny = PETSC_DECIDE, nlocal, Nlocal;
  PetscMPIInt size;
  KSP         ksp, ksp_fine;
  PC          pc;
  PetscScalar one = 1.0;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  user.ratio     = 2;
  user.coarse.mx = 5;
  user.coarse.my = 5;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mx", &user.coarse.mx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-My", &user.coarse.my, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ratio", &user.ratio, NULL));

  user.fine.mx = user.ratio * (user.coarse.mx - 1) + 1;
  user.fine.my = user.ratio * (user.coarse.my - 1) + 1;
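  /* e.g. with the defaults (ratio = 2, 5 x 5 coarse grid) the fine grid is 2*(5-1)+1 = 9 points in each direction */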

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Coarse grid size %" PetscInt_FMT " by %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Fine grid size %" PetscInt_FMT " by %" PetscInt_FMT "\n", user.fine.mx, user.fine.my));

  n = user.fine.mx * user.fine.my;
  N = user.coarse.mx * user.coarse.my;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Nx", &Nx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Ny", &Ny, NULL));

  /* Set up distributed array for fine grid */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, Nx, Ny, 1, 1, NULL, NULL, &user.fine.da));
  PetscCall(DMSetFromOptions(user.fine.da));
  PetscCall(DMSetUp(user.fine.da));
  PetscCall(DMCreateGlobalVector(user.fine.da, &user.fine.x));
  PetscCall(VecDuplicate(user.fine.x, &user.fine.r));
  PetscCall(VecDuplicate(user.fine.x, &user.fine.b));
  PetscCall(VecGetLocalSize(user.fine.x, &nlocal));
  PetscCall(DMCreateLocalVector(user.fine.da, &user.fine.localX));
  PetscCall(VecDuplicate(user.fine.localX, &user.fine.localF));
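  /* preallocate 5 nonzeros per row in the diagonal block and 3 in the off-process block, enough for the 5-point stencil */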
  PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, nlocal, nlocal, n, n, 5, NULL, 3, NULL, &user.fine.J));

  /* Set up distributed array for coarse grid */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, Nx, Ny, 1, 1, NULL, NULL, &user.coarse.da));
  PetscCall(DMSetFromOptions(user.coarse.da));
  PetscCall(DMSetUp(user.coarse.da));
  PetscCall(DMCreateGlobalVector(user.coarse.da, &user.coarse.x));
  PetscCall(VecDuplicate(user.coarse.x, &user.coarse.b));
  PetscCall(VecGetLocalSize(user.coarse.x, &Nlocal));
  PetscCall(DMCreateLocalVector(user.coarse.da, &user.coarse.localX));
  PetscCall(VecDuplicate(user.coarse.localX, &user.coarse.localF));
  PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, Nlocal, Nlocal, N, N, 5, NULL, 3, NULL, &user.coarse.J));

  /* Create linear solver */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /* Set a two-level additive multigrid preconditioner */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetType(pc, PC_MG_ADDITIVE));
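  /* the same preconditioner can be selected at run time with: -pc_type mg -pc_mg_levels 2 -pc_mg_type additive */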

  PetscCall(FormJacobian_Grid(&user, &user.coarse, &user.coarse.J));
  PetscCall(FormJacobian_Grid(&user, &user.fine, &user.fine.J));

  /* Create coarse level */
  PetscCall(PCMGGetCoarseSolve(pc, &user.ksp_coarse));
  PetscCall(KSPSetOptionsPrefix(user.ksp_coarse, "coarse_"));
  PetscCall(KSPSetFromOptions(user.ksp_coarse));
  PetscCall(KSPSetOperators(user.ksp_coarse, user.coarse.J, user.coarse.J));
  PetscCall(PCMGSetX(pc, COARSE_LEVEL, user.coarse.x));
  PetscCall(PCMGSetRhs(pc, COARSE_LEVEL, user.coarse.b));

  /* Create fine level */
  PetscCall(PCMGGetSmoother(pc, FINE_LEVEL, &ksp_fine));
  PetscCall(KSPSetOptionsPrefix(ksp_fine, "fine_"));
  PetscCall(KSPSetFromOptions(ksp_fine));
  PetscCall(KSPSetOperators(ksp_fine, user.fine.J, user.fine.J));
  PetscCall(PCMGSetR(pc, FINE_LEVEL, user.fine.r));

  /* Create interpolation between the levels */
  PetscCall(DMCreateInterpolation(user.coarse.da, user.fine.da, &user.Ii, NULL));
  PetscCall(PCMGSetInterpolation(pc, FINE_LEVEL, user.Ii));
  PetscCall(PCMGSetRestriction(pc, FINE_LEVEL, user.Ii));
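  /* the rectangular interpolation matrix is reused for restriction; PCMG applies its transpose when moving residuals down to the coarse grid */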

  PetscCall(KSPSetOperators(ksp, user.fine.J, user.fine.J));

  PetscCall(VecSet(user.fine.b, one));

  /* Set options, then solve the linear system */
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPSolve(ksp, user.fine.b, user.fine.x));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %" PetscInt_FMT "\n", its));

  /* Free data structures */
  PetscCall(MatDestroy(&user.fine.J));
  PetscCall(VecDestroy(&user.fine.x));
  PetscCall(VecDestroy(&user.fine.r));
  PetscCall(VecDestroy(&user.fine.b));
  PetscCall(DMDestroy(&user.fine.da));
  PetscCall(VecDestroy(&user.fine.localX));
  PetscCall(VecDestroy(&user.fine.localF));

  PetscCall(MatDestroy(&user.coarse.J));
  PetscCall(VecDestroy(&user.coarse.x));
  PetscCall(VecDestroy(&user.coarse.b));
  PetscCall(DMDestroy(&user.coarse.da));
  PetscCall(VecDestroy(&user.coarse.localX));
  PetscCall(VecDestroy(&user.coarse.localF));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&user.Ii));
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode FormJacobian_Grid(AppCtx *user, GridCtx *grid, Mat *J)
{
  Mat                    jac = *J;
  PetscInt               i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym, col[5];
  PetscInt               grow;
  const PetscInt        *ltog;
  PetscScalar            two = 2.0, one = 1.0, v[5], hx, hy, hxdhy, hydhx, value;
  ISLocalToGlobalMapping ltogm;

  PetscFunctionBeginUser;
  mx    = grid->mx;
  my    = grid->my;
  hx    = one / (PetscReal)(mx - 1);
  hy    = one / (PetscReal)(my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;

  /* Get ghost points */
  PetscCall(DMDAGetCorners(grid->da, &xs, &ys, NULL, &xm, &ym, NULL));
  PetscCall(DMDAGetGhostCorners(grid->da, &Xs, &Ys, NULL, &Xm, &Ym, NULL));
  PetscCall(DMGetLocalToGlobalMapping(grid->da, &ltogm));
  PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));

  /* Evaluate Jacobian of function */
  for (j = ys; j < ys + ym; j++) {
    row = (j - Ys) * Xm + xs - Xs - 1;
    for (i = xs; i < xs + xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx - 1 && j > 0 && j < my - 1) {
        /* interior point: full 5-point stencil */
        v[0]   = -hxdhy;
        col[0] = ltog[row - Xm];
        v[1]   = -hydhx;
        col[1] = ltog[row - 1];
        v[2]   = two * (hydhx + hxdhy);
        col[2] = grow;
        v[3]   = -hydhx;
        col[3] = ltog[row + 1];
        v[4]   = -hxdhy;
        col[4] = ltog[row + Xm];
        PetscCall(MatSetValues(jac, 1, &grow, 5, col, v, INSERT_VALUES));
      } else if ((i > 0 && i < mx - 1) || (j > 0 && j < my - 1)) {
        /* edge boundary point: Dirichlet row with half the interior diagonal */
        value = .5 * two * (hydhx + hxdhy);
        PetscCall(MatSetValues(jac, 1, &grow, 1, &grow, &value, INSERT_VALUES));
      } else {
        /* corner boundary point: Dirichlet row with a quarter of the interior diagonal */
        value = .25 * two * (hydhx + hxdhy);
        PetscCall(MatSetValues(jac, 1, &grow, 1, &grow, &value, INSERT_VALUES));
      }
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
222:   PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
223:   PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
224:   PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));

  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      args: -ksp_gmres_cgs_refinement_type refine_always -pc_type jacobi -ksp_monitor_short -ksp_type gmres

    test:
      suffix: 2
      nsize: 3
      args: -ksp_monitor_short

TEST*/
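
/*
   A possible run, using the options this example registers (the executable
   name ex19 is assumed from the file name):

      mpiexec -n 2 ./ex19 -Mx 9 -My 9 -ratio 2 -ksp_monitor_short
*/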