Actual source code: mg.c
2: /*
3: Defines the multigrid preconditioner interface.
4: */
5: #include <petsc/private/pcmgimpl.h>
6: #include <petsc/private/kspimpl.h>
7: #include <petscdm.h>
8: PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);
10: /*
11: Contains the list of registered coarse space construction routines
12: */
13: PetscFunctionList PCMGCoarseList = NULL;
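/*
  PCMGMCycle_Private - runs one multiplicative (V- or W-) cycle starting at the level pointed to by mglevelsin:
  pre-smooth, compute the residual, restrict, recurse to the coarser level, interpolate and add the correction,
  then post-smooth. transpose applies the transposed cycle, matapp operates on blocks of right-hand sides stored
  as matrices (KSPMatSolve()), and reason, if provided, reports Richardson convergence detected on the finest level.
*/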
15: PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
16: {
17: PC_MG *mg = (PC_MG *)pc->data;
18: PC_MG_Levels *mgc, *mglevels = *mglevelsin;
19: PetscInt cycles = (mglevels->level == 1) ? 1 : (PetscInt)mglevels->cycles;
21: PetscFunctionBegin;
22: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
23: if (!transpose) {
24: if (matapp) {
25: PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
26: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
27: } else {
28: PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
29: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
30: }
31: } else {
32: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
33: PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
34: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
35: }
36: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
37: if (mglevels->level) { /* not the coarsest grid */
38: if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
39: if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
40: if (!transpose) {
41: if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
42: else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
43: } else {
44: if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
45: else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
46: }
47: if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));
49: /* if on finest level and have convergence criteria set */
50: if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
51: PetscReal rnorm;
52: PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
53: if (rnorm <= mg->ttol) {
54: if (rnorm < mg->abstol) {
55: *reason = PCRICHARDSON_CONVERGED_ATOL;
56: PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
57: } else {
58: *reason = PCRICHARDSON_CONVERGED_RTOL;
59: PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
60: }
61: PetscFunctionReturn(PETSC_SUCCESS);
62: }
63: }
65: mgc = *(mglevelsin - 1);
66: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
67: if (!transpose) {
68: if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
69: else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
70: } else {
71: if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
72: else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
73: }
74: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
75: if (matapp) {
76: if (!mgc->X) {
77: PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
78: } else {
79: PetscCall(MatZeroEntries(mgc->X));
80: }
81: } else {
82: PetscCall(VecZeroEntries(mgc->x));
83: }
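/* coarse-grid correction: recurse once for a V-cycle, twice for a W-cycle (cycles was set from mglevels->cycles above) */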
84: while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
85: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
86: if (!transpose) {
87: if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
88: else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
89: } else {
90: PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
91: }
92: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
93: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
94: if (!transpose) {
95: if (matapp) {
96: PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
97: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
98: } else {
99: PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
100: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
101: }
102: } else {
103: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
104: PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
105: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
106: }
107: if (mglevels->cr) {
108: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
109: /* TODO Turn on copy and turn off noisy if we have an exact solution
110: PetscCall(VecCopy(mglevels->x, mglevels->crx));
111: PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
112: PetscCall(KSPSetNoisy_Private(mglevels->crx));
113: PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
114: PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
115: }
116: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
117: }
118: PetscFunctionReturn(PETSC_SUCCESS);
119: }
121: static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
122: {
123: PC_MG *mg = (PC_MG *)pc->data;
124: PC_MG_Levels **mglevels = mg->levels;
125: PC tpc;
126: PetscBool changeu, changed;
127: PetscInt levels = mglevels[0]->levels, i;
129: PetscFunctionBegin;
130: /* When the DM is supplying the matrix then it will not exist until here */
131: for (i = 0; i < levels; i++) {
132: if (!mglevels[i]->A) {
133: PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
134: PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
135: }
136: }
138: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
139: PetscCall(PCPreSolveChangeRHS(tpc, &changed));
140: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
141: PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
142: if (!changed && !changeu) {
143: PetscCall(VecDestroy(&mglevels[levels - 1]->b));
144: mglevels[levels - 1]->b = b;
145: } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
146: if (!mglevels[levels - 1]->b) {
147: Vec *vec;
149: PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
150: mglevels[levels - 1]->b = *vec;
151: PetscCall(PetscFree(vec));
152: }
153: PetscCall(VecCopy(b, mglevels[levels - 1]->b));
154: }
155: mglevels[levels - 1]->x = x;
157: mg->rtol = rtol;
158: mg->abstol = abstol;
159: mg->dtol = dtol;
160: if (rtol) {
161: /* compute initial residual norm for relative convergence test */
162: PetscReal rnorm;
163: if (zeroguess) {
164: PetscCall(VecNorm(b, NORM_2, &rnorm));
165: } else {
166: PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
167: PetscCall(VecNorm(w, NORM_2, &rnorm));
168: }
169: mg->ttol = PetscMax(rtol * rnorm, abstol);
170: } else if (abstol) mg->ttol = abstol;
171: else mg->ttol = 0.0;
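/* the Richardson cycles are considered converged once ||r|| <= ttol = max(rtol*||r_0||, abstol); ttol = 0 disables the test */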
173: /* since the smoother is applied to the full system, not just the residual, we need to make sure that the smoothers
174: don't stop prematurely due to a small residual */
175: for (i = 1; i < levels; i++) {
176: PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
177: if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
178: /* For Richardson the initial guess is nonzero since each cycle solves the original system, rather than just applying the operator as a preconditioner */
179: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
180: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
181: }
182: }
184: *reason = (PCRichardsonConvergedReason)0;
185: for (i = 0; i < its; i++) {
186: PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
187: if (*reason) break;
188: }
189: if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
190: *outits = i;
191: if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
192: PetscFunctionReturn(PETSC_SUCCESS);
193: }
195: PetscErrorCode PCReset_MG(PC pc)
196: {
197: PC_MG *mg = (PC_MG *)pc->data;
198: PC_MG_Levels **mglevels = mg->levels;
199: PetscInt i, n;
201: PetscFunctionBegin;
202: if (mglevels) {
203: n = mglevels[0]->levels;
204: for (i = 0; i < n - 1; i++) {
205: PetscCall(VecDestroy(&mglevels[i + 1]->r));
206: PetscCall(VecDestroy(&mglevels[i]->b));
207: PetscCall(VecDestroy(&mglevels[i]->x));
208: PetscCall(MatDestroy(&mglevels[i + 1]->R));
209: PetscCall(MatDestroy(&mglevels[i]->B));
210: PetscCall(MatDestroy(&mglevels[i]->X));
211: PetscCall(VecDestroy(&mglevels[i]->crx));
212: PetscCall(VecDestroy(&mglevels[i]->crb));
213: PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
214: PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
215: PetscCall(MatDestroy(&mglevels[i + 1]->inject));
216: PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
217: }
218: PetscCall(VecDestroy(&mglevels[n - 1]->crx));
219: PetscCall(VecDestroy(&mglevels[n - 1]->crb));
220: /* this is not null only if the smoother on the finest level
221: changes the rhs during PreSolve */
222: PetscCall(VecDestroy(&mglevels[n - 1]->b));
223: PetscCall(MatDestroy(&mglevels[n - 1]->B));
225: for (i = 0; i < n; i++) {
226: PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
227: PetscCall(MatDestroy(&mglevels[i]->A));
228: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
229: PetscCall(KSPReset(mglevels[i]->smoothu));
230: if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
231: }
232: mg->Nc = 0;
233: }
234: PetscFunctionReturn(PETSC_SUCCESS);
235: }
237: /* Implementing CR
239: We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is
241: Inj^T (Inj Inj^T)^{-1} Inj
243: and if Inj is a VecScatter, as it is now in PETSc, we have
245: Inj^T Inj
247: and
249: S = I - Inj^T Inj
251: since
253: Inj S = Inj - (Inj Inj^T) Inj = 0.
255: Brannick suggests
257: A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S
259: but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use
261: M^{-1} A \to S M^{-1} A S
263: In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.
265: Check: || Inj P - I ||_F < tol
266: Check: In general, Inj Inj^T = I
267: */
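/*
  The complementary projector S = I - Inj^T Inj described above is realized in CRSetup_Private() below:
  MatCreateTranspose() turns the stored injection into Inj, MatCreateNormal() provides Inj^T Inj as an
  implicit operator, and MatScale(-1.0) followed by MatShift(1.0) yields S.
*/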
269: typedef struct {
270: PC mg; /* The PCMG object */
271: PetscInt l; /* The multigrid level for this solver */
272: Mat Inj; /* The injection matrix */
273: Mat S; /* I - Inj^T Inj */
274: } CRContext;
276: static PetscErrorCode CRSetup_Private(PC pc)
277: {
278: CRContext *ctx;
279: Mat It;
281: PetscFunctionBeginUser;
282: PetscCall(PCShellGetContext(pc, &ctx));
283: PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
284: PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
285: PetscCall(MatCreateTranspose(It, &ctx->Inj));
286: PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
287: PetscCall(MatScale(ctx->S, -1.0));
288: PetscCall(MatShift(ctx->S, 1.0));
289: PetscFunctionReturn(PETSC_SUCCESS);
290: }
292: static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
293: {
294: CRContext *ctx;
296: PetscFunctionBeginUser;
297: PetscCall(PCShellGetContext(pc, &ctx));
298: PetscCall(MatMult(ctx->S, x, y));
299: PetscFunctionReturn(PETSC_SUCCESS);
300: }
302: static PetscErrorCode CRDestroy_Private(PC pc)
303: {
304: CRContext *ctx;
306: PetscFunctionBeginUser;
307: PetscCall(PCShellGetContext(pc, &ctx));
308: PetscCall(MatDestroy(&ctx->Inj));
309: PetscCall(MatDestroy(&ctx->S));
310: PetscCall(PetscFree(ctx));
311: PetscCall(PCShellSetContext(pc, NULL));
312: PetscFunctionReturn(PETSC_SUCCESS);
313: }
315: static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
316: {
317: CRContext *ctx;
319: PetscFunctionBeginUser;
320: PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
321: PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
322: PetscCall(PetscCalloc1(1, &ctx));
323: ctx->mg = pc;
324: ctx->l = l;
325: PetscCall(PCSetType(*cr, PCSHELL));
326: PetscCall(PCShellSetContext(*cr, ctx));
327: PetscCall(PCShellSetApply(*cr, CRApply_Private));
328: PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
329: PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
330: PetscFunctionReturn(PETSC_SUCCESS);
331: }
333: PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
334: {
335: PC_MG *mg = (PC_MG *)pc->data;
336: MPI_Comm comm;
337: PC_MG_Levels **mglevels = mg->levels;
338: PCMGType mgtype = mg->am;
339: PetscInt mgctype = (PetscInt)PC_MG_CYCLE_V;
340: PetscInt i;
341: PetscMPIInt size;
342: const char *prefix;
343: PC ipc;
344: PetscInt n;
346: PetscFunctionBegin;
349: if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
350: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
351: if (mglevels) {
352: mgctype = mglevels[0]->cycles;
353: /* changing the number of levels so free up the previous stuff */
354: PetscCall(PCReset_MG(pc));
355: n = mglevels[0]->levels;
356: for (i = 0; i < n; i++) {
357: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
358: PetscCall(KSPDestroy(&mglevels[i]->smoothu));
359: PetscCall(KSPDestroy(&mglevels[i]->cr));
360: PetscCall(PetscFree(mglevels[i]));
361: }
362: PetscCall(PetscFree(mg->levels));
363: }
365: mg->nlevels = levels;
367: PetscCall(PetscMalloc1(levels, &mglevels));
369: PetscCall(PCGetOptionsPrefix(pc, &prefix));
371: mg->stageApply = 0;
372: for (i = 0; i < levels; i++) {
373: PetscCall(PetscNew(&mglevels[i]));
375: mglevels[i]->level = i;
376: mglevels[i]->levels = levels;
377: mglevels[i]->cycles = mgctype;
378: mg->default_smoothu = 2;
379: mg->default_smoothd = 2;
380: mglevels[i]->eventsmoothsetup = 0;
381: mglevels[i]->eventsmoothsolve = 0;
382: mglevels[i]->eventresidual = 0;
383: mglevels[i]->eventinterprestrict = 0;
385: if (comms) comm = comms[i];
386: if (comm != MPI_COMM_NULL) {
387: PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
388: PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
389: PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
390: PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
391: PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
392: if (i || levels == 1) {
393: char tprefix[128];
395: PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
396: PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
397: PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
398: PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
399: PetscCall(PCSetType(ipc, PCSOR));
400: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));
402: PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%d_", (int)i));
403: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
404: } else {
405: PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));
407: /* coarse solve is (redundant) LU by default; set the shift type to INBLOCKS to avoid annoying zero pivots in the LU factorization */
408: PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
409: PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
410: PetscCallMPI(MPI_Comm_size(comm, &size));
411: if (size > 1) {
412: PetscCall(PCSetType(ipc, PCREDUNDANT));
413: } else {
414: PetscCall(PCSetType(ipc, PCLU));
415: }
416: PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
417: }
418: }
419: mglevels[i]->smoothu = mglevels[i]->smoothd;
420: mg->rtol = 0.0;
421: mg->abstol = 0.0;
422: mg->dtol = 0.0;
423: mg->ttol = 0.0;
424: mg->cyclesperpcapply = 1;
425: }
426: mg->levels = mglevels;
427: PetscCall(PCMGSetType(pc, mgtype));
428: PetscFunctionReturn(PETSC_SUCCESS);
429: }
431: /*@C
432: PCMGSetLevels - Sets the number of levels to use with `PCMG`.
433: Must be called before any other `PCMG` routine.
435: Logically Collective
437: Input Parameters:
438: + pc - the preconditioner context
439: . levels - the number of levels
440: - comms - optional communicators for each level; this is to allow solving the coarser problems
441: on smaller sets of processes. For processes that are not included in the computation
442: you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
443: should participate in each level of problem.
445: Level: intermediate
447: Notes:
448: If the number of levels is one then the multigrid uses the `-mg_levels` prefix
449: for setting the level options rather than the `-mg_coarse` prefix.
451: You can free the information in comms after this routine is called.
453: The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
454: are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
455: the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
456: of size 2 and 1, while rank 1 in the original communicator will pass in an array of 2 communicators,
457: the first of size 2 and the second equal to `MPI_COMM_NULL`, since rank 1 does not participate
458: in the coarse grid solve.
460: Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous one, special
461: care must be taken in providing the restriction and interpolation operations. We recommend
462: providing these as two-step operations: first perform a standard restriction or interpolation on
463: the full number of ranks for that level, then use an MPI call to copy the resulting vector
464: array entries (obtained with calls to VecGetArray()) to the smaller or larger number of ranks; note that in both
465: cases the MPI calls must be made on the larger of the two communicators. Traditional MPI sends and
466: receives or `MPI_Alltoallv()` could be used to do the reshuffling of the vector entries.
468: Fortran Note:
469: Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
470: is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.
472: .seealso: `PCMGSetType()`, `PCMGGetLevels()`
473: @*/
474: PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
475: {
476: PetscFunctionBegin;
479: PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
480: PetscFunctionReturn(PETSC_SUCCESS);
481: }
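/*
  A minimal usage sketch (illustrative caller code, not part of this file); it assumes a KSP named ksp
  has already been created and configured with operators:

      PC pc;
      PetscCall(KSPGetPC(ksp, &pc));
      PetscCall(PCSetType(pc, PCMG));
      PetscCall(PCMGSetLevels(pc, 3, NULL)); // NULL: all ranks participate on every level
      // per-level smoothers can then be customized with PCMGGetSmoother(pc, level, &smoother)
*/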
483: PetscErrorCode PCDestroy_MG(PC pc)
484: {
485: PC_MG *mg = (PC_MG *)pc->data;
486: PC_MG_Levels **mglevels = mg->levels;
487: PetscInt i, n;
489: PetscFunctionBegin;
490: PetscCall(PCReset_MG(pc));
491: if (mglevels) {
492: n = mglevels[0]->levels;
493: for (i = 0; i < n; i++) {
494: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
495: PetscCall(KSPDestroy(&mglevels[i]->smoothu));
496: PetscCall(KSPDestroy(&mglevels[i]->cr));
497: PetscCall(PetscFree(mglevels[i]));
498: }
499: PetscCall(PetscFree(mg->levels));
500: }
501: PetscCall(PetscFree(pc->data));
502: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
503: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
504: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
505: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
506: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
507: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
508: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
509: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
510: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
511: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
512: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
513: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
514: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
515: PetscFunctionReturn(PETSC_SUCCESS);
516: }
518: /*
519: PCApply_MG - Runs either an additive, multiplicative, Kaskadic
520: or full cycle of multigrid.
522: Note:
523: A simple wrapper that calls PCMGMCycle_Private(), PCMGACycle_Private(), PCMGKCycle_Private(), or PCMGFCycle_Private().
524: */
525: static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
526: {
527: PC_MG *mg = (PC_MG *)pc->data;
528: PC_MG_Levels **mglevels = mg->levels;
529: PC tpc;
530: PetscInt levels = mglevels[0]->levels, i;
531: PetscBool changeu, changed, matapp;
533: PetscFunctionBegin;
534: matapp = (PetscBool)(B && X);
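/* matapp is true when the preconditioner is applied to multiple right-hand sides stored as the columns of the matrix B, producing X (the PCMatApply() path) */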
535: if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
536: /* When the DM is supplying the matrix then it will not exist until here */
537: for (i = 0; i < levels; i++) {
538: if (!mglevels[i]->A) {
539: PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
540: PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
541: }
542: }
544: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
545: PetscCall(PCPreSolveChangeRHS(tpc, &changed));
546: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
547: PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
548: if (!changeu && !changed) {
549: if (matapp) {
550: PetscCall(MatDestroy(&mglevels[levels - 1]->B));
551: mglevels[levels - 1]->B = B;
552: } else {
553: PetscCall(VecDestroy(&mglevels[levels - 1]->b));
554: mglevels[levels - 1]->b = b;
555: }
556: } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
557: if (matapp) {
558: if (mglevels[levels - 1]->B) {
559: PetscInt N1, N2;
560: PetscBool flg;
562: PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
563: PetscCall(MatGetSize(B, NULL, &N2));
564: PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
565: if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
566: }
567: if (!mglevels[levels - 1]->B) {
568: PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
569: } else {
570: PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
571: }
572: } else {
573: if (!mglevels[levels - 1]->b) {
574: Vec *vec;
576: PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
577: mglevels[levels - 1]->b = *vec;
578: PetscCall(PetscFree(vec));
579: }
580: PetscCall(VecCopy(b, mglevels[levels - 1]->b));
581: }
582: }
583: if (matapp) {
584: mglevels[levels - 1]->X = X;
585: } else {
586: mglevels[levels - 1]->x = x;
587: }
589: /* If coarser Xs are present, it means we have already block-applied the PC at least once.
590: Reset the operators if the sizes/types do not match */
591: if (matapp && levels > 1 && mglevels[levels - 2]->X) {
592: PetscInt Xc, Bc;
593: PetscBool flg;
595: PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
596: PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
597: PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
598: if (Xc != Bc || !flg) {
599: PetscCall(MatDestroy(&mglevels[levels - 1]->R));
600: for (i = 0; i < levels - 1; i++) {
601: PetscCall(MatDestroy(&mglevels[i]->R));
602: PetscCall(MatDestroy(&mglevels[i]->B));
603: PetscCall(MatDestroy(&mglevels[i]->X));
604: }
605: }
606: }
608: if (mg->am == PC_MG_MULTIPLICATIVE) {
609: if (matapp) PetscCall(MatZeroEntries(X));
610: else PetscCall(VecZeroEntries(x));
611: for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
612: } else if (mg->am == PC_MG_ADDITIVE) {
613: PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
614: } else if (mg->am == PC_MG_KASKADE) {
615: PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
616: } else {
617: PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
618: }
619: if (mg->stageApply) PetscCall(PetscLogStagePop());
620: if (!changeu && !changed) {
621: if (matapp) {
622: mglevels[levels - 1]->B = NULL;
623: } else {
624: mglevels[levels - 1]->b = NULL;
625: }
626: }
627: PetscFunctionReturn(PETSC_SUCCESS);
628: }
630: static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
631: {
632: PetscFunctionBegin;
633: PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
634: PetscFunctionReturn(PETSC_SUCCESS);
635: }
637: static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
638: {
639: PetscFunctionBegin;
640: PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
641: PetscFunctionReturn(PETSC_SUCCESS);
642: }
644: static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
645: {
646: PetscFunctionBegin;
647: PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
648: PetscFunctionReturn(PETSC_SUCCESS);
649: }
651: PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems *PetscOptionsObject)
652: {
653: PetscInt levels, cycles;
654: PetscBool flg, flg2;
655: PC_MG *mg = (PC_MG *)pc->data;
656: PC_MG_Levels **mglevels;
657: PCMGType mgtype;
658: PCMGCycleType mgctype;
659: PCMGGalerkinType gtype;
660: PCMGCoarseSpaceType coarseSpaceType;
662: PetscFunctionBegin;
663: levels = PetscMax(mg->nlevels, 1);
664: PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
665: PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
666: if (!flg && !mg->levels && pc->dm) {
667: PetscCall(DMGetRefineLevel(pc->dm, &levels));
668: levels++;
669: mg->usedmfornumberoflevels = PETSC_TRUE;
670: }
671: PetscCall(PCMGSetLevels(pc, levels, NULL));
672: mglevels = mg->levels;
674: mgctype = (PCMGCycleType)mglevels[0]->cycles;
675: PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
676: if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
677: gtype = mg->galerkin;
678: PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)gtype, (PetscEnum *)>ype, &flg));
679: if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
680: coarseSpaceType = mg->coarseSpaceType;
681: PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
682: if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
683: PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
684: PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
685: flg2 = PETSC_FALSE;
686: PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
687: if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
688: flg = PETSC_FALSE;
689: PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
690: if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
691: mgtype = mg->am;
692: PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
693: if (flg) PetscCall(PCMGSetType(pc, mgtype));
694: if (mg->am == PC_MG_MULTIPLICATIVE) {
695: PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
696: if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
697: }
698: flg = PETSC_FALSE;
699: PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
700: if (flg) {
701: PetscInt i;
702: char eventname[128];
704: levels = mglevels[0]->levels;
705: for (i = 0; i < levels; i++) {
706: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %d", (int)i));
707: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
708: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %d", (int)i));
709: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
710: if (i) {
711: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %d", (int)i));
712: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
713: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %d", (int)i));
714: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
715: }
716: }
718: #if defined(PETSC_USE_LOG)
719: {
720: const char *sname = "MG Apply";
721: PetscStageLog stageLog;
722: PetscInt st;
724: PetscCall(PetscLogGetStageLog(&stageLog));
725: for (st = 0; st < stageLog->numStages; ++st) {
726: PetscBool same;
728: PetscCall(PetscStrcmp(stageLog->stageInfo[st].name, sname, &same));
729: if (same) mg->stageApply = st;
730: }
731: if (!mg->stageApply) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
732: }
733: #endif
734: }
735: PetscOptionsHeadEnd();
736: /* Check option consistency */
737: PetscCall(PCMGGetGalerkin(pc, &gtype));
738: PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
739: PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
740: PetscFunctionReturn(PETSC_SUCCESS);
741: }
743: const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
744: const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
745: const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
746: const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};
748: #include <petscdraw.h>
749: PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
750: {
751: PC_MG *mg = (PC_MG *)pc->data;
752: PC_MG_Levels **mglevels = mg->levels;
753: PetscInt levels = mglevels ? mglevels[0]->levels : 0, i;
754: PetscBool iascii, isbinary, isdraw;
756: PetscFunctionBegin;
757: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
758: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
759: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
760: if (iascii) {
761: const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
762: PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
763: if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
764: if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
765: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n"));
766: } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
767: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n"));
768: } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
769: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n"));
770: } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
771: PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n"));
772: } else {
773: PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n"));
774: }
775: if (mg->view) PetscCall((*mg->view)(pc, viewer));
776: for (i = 0; i < levels; i++) {
777: if (i) {
778: PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
779: } else {
780: PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
781: }
782: PetscCall(PetscViewerASCIIPushTab(viewer));
783: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
784: PetscCall(PetscViewerASCIIPopTab(viewer));
785: if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
786: PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
787: } else if (i) {
788: PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
789: PetscCall(PetscViewerASCIIPushTab(viewer));
790: PetscCall(KSPView(mglevels[i]->smoothu, viewer));
791: PetscCall(PetscViewerASCIIPopTab(viewer));
792: }
793: if (i && mglevels[i]->cr) {
794: PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
795: PetscCall(PetscViewerASCIIPushTab(viewer));
796: PetscCall(KSPView(mglevels[i]->cr, viewer));
797: PetscCall(PetscViewerASCIIPopTab(viewer));
798: }
799: }
800: } else if (isbinary) {
801: for (i = levels - 1; i >= 0; i--) {
802: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
803: if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
804: }
805: } else if (isdraw) {
806: PetscDraw draw;
807: PetscReal x, w, y, bottom, th;
808: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
809: PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
810: PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
811: bottom = y - th;
812: for (i = levels - 1; i >= 0; i--) {
813: if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
814: PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
815: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
816: PetscCall(PetscDrawPopCurrentPoint(draw));
817: } else {
818: w = 0.5 * PetscMin(1.0 - x, x);
819: PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
820: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
821: PetscCall(PetscDrawPopCurrentPoint(draw));
822: PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
823: PetscCall(KSPView(mglevels[i]->smoothu, viewer));
824: PetscCall(PetscDrawPopCurrentPoint(draw));
825: }
826: PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
827: bottom -= th;
828: }
829: }
830: PetscFunctionReturn(PETSC_SUCCESS);
831: }
833: #include <petsc/private/kspimpl.h>
835: /*
836: Calls setup for the KSP on each level
837: */
838: PetscErrorCode PCSetUp_MG(PC pc)
839: {
840: PC_MG *mg = (PC_MG *)pc->data;
841: PC_MG_Levels **mglevels = mg->levels;
842: PetscInt i, n;
843: PC cpc;
844: PetscBool dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
845: Mat dA, dB;
846: Vec tvec;
847: DM *dms;
848: PetscViewer viewer = NULL;
849: PetscBool dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
850: PetscBool adaptInterpolation = mg->adaptInterpolation;
852: PetscFunctionBegin;
853: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
854: n = mglevels[0]->levels;
855: /* FIX: Move this to PCSetFromOptions_MG? */
856: if (mg->usedmfornumberoflevels) {
857: PetscInt levels;
858: PetscCall(DMGetRefineLevel(pc->dm, &levels));
859: levels++;
860: if (levels > n) { /* the problem is now being solved on a finer grid */
861: PetscCall(PCMGSetLevels(pc, levels, NULL));
862: n = levels;
863: PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
864: mglevels = mg->levels;
865: }
866: }
867: PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));
869: /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
870: /* so use those from global PC */
871: /* Is this what we always want? What if user wants to keep old one? */
872: PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
873: if (opsset) {
874: Mat mmat;
875: PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
876: if (mmat == pc->pmat) opsset = PETSC_FALSE;
877: }
879: /* Create CR solvers */
880: PetscCall(PCMGGetAdaptCR(pc, &doCR));
881: if (doCR) {
882: const char *prefix;
884: PetscCall(PCGetOptionsPrefix(pc, &prefix));
885: for (i = 1; i < n; ++i) {
886: PC ipc, cr;
887: char crprefix[128];
889: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
890: PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
891: PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
892: PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
893: PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
894: PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
895: PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
896: PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
897: PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));
899: PetscCall(PCSetType(ipc, PCCOMPOSITE));
900: PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
901: PetscCall(PCCompositeAddPCType(ipc, PCSOR));
902: PetscCall(CreateCR_Private(pc, i, &cr));
903: PetscCall(PCCompositeAddPC(ipc, cr));
904: PetscCall(PCDestroy(&cr));
906: PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));
907: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
908: PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%d_cr_", (int)i));
909: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
910: }
911: }
913: if (!opsset) {
914: PetscCall(PCGetUseAmat(pc, &use_amat));
915: if (use_amat) {
916: PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
917: PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
918: } else {
919: PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
920: PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
921: }
922: }
924: for (i = n - 1; i > 0; i--) {
925: if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
926: missinginterpolate = PETSC_TRUE;
927: break;
928: }
929: }
931: PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
932: if (dA == dB) dAeqdB = PETSC_TRUE;
933: if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) {
934: needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
935: }
937: if (pc->dm && !pc->setupcalled) {
938: /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
939: PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
940: PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
941: if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
942: PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
943: PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
944: }
945: if (mglevels[n - 1]->cr) {
946: PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
947: PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
948: }
949: }
951: /*
952: Skip if the user has provided all the interpolation/restriction needed (since the DM might not be able to produce them, e.g. when coming from SNES/TS).
953: Skip for an externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
954: */
955: if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
956: /* first see if we can compute a coarse space */
957: if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
958: for (i = n - 2; i > -1; i--) {
959: if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
960: PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
961: PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
962: }
963: }
964: } else { /* construct the interpolation from the DMs */
965: Mat p;
966: Vec rscale;
967: PetscCall(PetscMalloc1(n, &dms));
968: dms[n - 1] = pc->dm;
969: /* Separately create them so we do not get DMKSP interference between levels */
970: for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
971: for (i = n - 2; i > -1; i--) {
972: DMKSP kdm;
973: PetscBool dmhasrestrict, dmhasinject;
974: PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
975: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
976: if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
977: PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
978: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
979: }
980: if (mglevels[i]->cr) {
981: PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
982: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
983: }
984: PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
985: /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
986: * a bitwise OR of computing the matrix, RHS, and initial iterate. */
987: kdm->ops->computerhs = NULL;
988: kdm->rhsctx = NULL;
989: if (!mglevels[i + 1]->interpolate) {
990: PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
991: PetscCall(PCMGSetInterpolation(pc, i + 1, p));
992: if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
993: PetscCall(VecDestroy(&rscale));
994: PetscCall(MatDestroy(&p));
995: }
996: PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
997: if (dmhasrestrict && !mglevels[i + 1]->restrct) {
998: PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
999: PetscCall(PCMGSetRestriction(pc, i + 1, p));
1000: PetscCall(MatDestroy(&p));
1001: }
1002: PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
1003: if (dmhasinject && !mglevels[i + 1]->inject) {
1004: PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
1005: PetscCall(PCMGSetInjection(pc, i + 1, p));
1006: PetscCall(MatDestroy(&p));
1007: }
1008: }
1010: for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
1011: PetscCall(PetscFree(dms));
1012: }
1013: }
1015: if (mg->galerkin < PC_MG_GALERKIN_NONE) {
1016: Mat A, B;
1017: PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
1018: MatReuse reuse = MAT_INITIAL_MATRIX;
1020: if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
1021: if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
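/* doB: form the coarse preconditioning matrices (pmat) with the Galerkin triple product R * dB * P; doA: also form the coarse operators (mat), either when only mat is requested or when both are requested and mat differs from pmat on the finest level */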
1022: if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
1023: for (i = n - 2; i > -1; i--) {
1024: PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
1025: if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
1026: if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
1027: if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
1028: if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
1029: if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
1030: /* the management of the PetscObjectReference() and PetscObjectDereference() calls below is rather delicate */
1031: if (!doA && dAeqdB) {
1032: if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
1033: A = B;
1034: } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
1035: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
1036: PetscCall(PetscObjectReference((PetscObject)A));
1037: }
1038: if (!doB && dAeqdB) {
1039: if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
1040: B = A;
1041: } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
1042: PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
1043: PetscCall(PetscObjectReference((PetscObject)B));
1044: }
1045: if (reuse == MAT_INITIAL_MATRIX) {
1046: PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
1047: PetscCall(PetscObjectDereference((PetscObject)A));
1048: PetscCall(PetscObjectDereference((PetscObject)B));
1049: }
1050: dA = A;
1051: dB = B;
1052: }
1053: }
1055: /* Adapt interpolation matrices */
1056: if (adaptInterpolation) {
1057: for (i = 0; i < n; ++i) {
1058: if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
1059: if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
1060: }
1061: for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
1062: }
1064: if (needRestricts && pc->dm) {
1065: for (i = n - 2; i >= 0; i--) {
1066: DM dmfine, dmcoarse;
1067: Mat Restrict, Inject;
1068: Vec rscale;
1069: PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
1070: PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
1071: PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
1072: PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
1073: PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
1074: PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
1075: }
1076: }
1078: if (!pc->setupcalled) {
1079: for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
1080: for (i = 1; i < n; i++) {
1081: if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
1082: if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
1083: }
1084: /* ensure that if either interpolation or restriction is set, the other one is set as well */
1085: for (i = 1; i < n; i++) {
1086: PetscCall(PCMGGetInterpolation(pc, i, NULL));
1087: PetscCall(PCMGGetRestriction(pc, i, NULL));
1088: }
1089: for (i = 0; i < n - 1; i++) {
1090: if (!mglevels[i]->b) {
1091: Vec *vec;
1092: PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
1093: PetscCall(PCMGSetRhs(pc, i, *vec));
1094: PetscCall(VecDestroy(vec));
1095: PetscCall(PetscFree(vec));
1096: }
1097: if (!mglevels[i]->r && i) {
1098: PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1099: PetscCall(PCMGSetR(pc, i, tvec));
1100: PetscCall(VecDestroy(&tvec));
1101: }
1102: if (!mglevels[i]->x) {
1103: PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1104: PetscCall(PCMGSetX(pc, i, tvec));
1105: PetscCall(VecDestroy(&tvec));
1106: }
1107: if (doCR) {
1108: PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
1109: PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
1110: PetscCall(VecZeroEntries(mglevels[i]->crb));
1111: }
1112: }
1113: if (n != 1 && !mglevels[n - 1]->r) {
1114: /* PCMGSetR() on the finest level if user did not supply it */
1115: Vec *vec;
1116: PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
1117: PetscCall(PCMGSetR(pc, n - 1, *vec));
1118: PetscCall(VecDestroy(vec));
1119: PetscCall(PetscFree(vec));
1120: }
1121: if (doCR) {
1122: PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
1123: PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
1124: PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
1125: }
1126: }
1128: if (pc->dm) {
1129: /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
1130: for (i = 0; i < n - 1; i++) {
1131: if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1132: }
1133: }
1134: // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
1135: // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
1136: if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1138: for (i = 1; i < n; i++) {
1139: if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
1140: /* if only down smoothing were done the initial guess would be zero; here the smoother is reused (or full/Kaskade/multiple cycles are used), so it must accept a nonzero initial guess */
1141: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
1142: }
1143: if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1144: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1145: PetscCall(KSPSetUp(mglevels[i]->smoothd));
1146: if (mglevels[i]->smoothd->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1147: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1148: if (!mglevels[i]->residual) {
1149: Mat mat;
1150: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1151: PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
1152: }
1153: if (!mglevels[i]->residualtranspose) {
1154: Mat mat;
1155: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1156: PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
1157: }
1158: }
1159: for (i = 1; i < n; i++) {
1160: if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
1161: Mat downmat, downpmat;
1163: /* check if operators have been set for up, if not use down operators to set them */
1164: PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
1165: if (!opsset) {
1166: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1167: PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
1168: }
1170: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
1171: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1172: PetscCall(KSPSetUp(mglevels[i]->smoothu));
1173: if (mglevels[i]->smoothu->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1174: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1175: }
1176: if (mglevels[i]->cr) {
1177: Mat downmat, downpmat;
1179: /* check if operators have been set for the CR solver; if not use the down operators to set them */
1180: PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
1181: if (!opsset) {
1182: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1183: PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
1184: }
1186: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1187: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1188: PetscCall(KSPSetUp(mglevels[i]->cr));
1189: if (mglevels[i]->cr->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1190: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1191: }
1192: }
1194: if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1195: PetscCall(KSPSetUp(mglevels[0]->smoothd));
1196: if (mglevels[0]->smoothd->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
1197: if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1199: /*
1200: Dump the interpolation/restriction matrices plus the
1201: Jacobian/stiffness on each level. This allows MATLAB users to
1202: easily check if the Galerkin condition A_c = R A_f R^T is satisfied.
1204: Only one or the other is supported at a time.
1205: */
1206: #if defined(PETSC_USE_SOCKET_VIEWER)
1207: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
1208: if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
1209: dump = PETSC_FALSE;
1210: #endif
1211: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
1212: if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));
1214: if (viewer) {
1215: for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
1216: for (i = 0; i < n; i++) {
1217: PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
1218: PetscCall(MatView(pc->mat, viewer));
1219: }
1220: }
1221: PetscFunctionReturn(PETSC_SUCCESS);
1222: }
1224: /* -------------------------------------------------------------------------------------*/
1226: PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
1227: {
1228: PC_MG *mg = (PC_MG *)pc->data;
1230: PetscFunctionBegin;
1231: *levels = mg->nlevels;
1232: PetscFunctionReturn(PETSC_SUCCESS);
1233: }
1235: /*@
1236: PCMGGetLevels - Gets the number of levels to use with `PCMG`.
1238: Not Collective
1240: Input Parameter:
1241: . pc - the preconditioner context
1243: Output Parameter:
1244: . levels - the number of levels
1246: Level: advanced
1248: .seealso: `PCMG`, `PCMGSetLevels()`
1249: @*/
1250: PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
1251: {
1252: PetscFunctionBegin;
1255: *levels = 0;
1256: PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
1257: PetscFunctionReturn(PETSC_SUCCESS);
1258: }
1260: /*@
1261: PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy
1263: Input Parameter:
1264: . pc - the preconditioner context
1266: Output Parameters:
1267: + gc - grid complexity = sum_i(n_i) / n_0
1268: - oc - operator complexity = sum_i(nnz_i) / nnz_0
1270: Level: advanced
1272: Note:
1273: This is often called the operator complexity in the multigrid literature
1275: .seealso: `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1276: @*/
1277: PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
1278: {
1279: PC_MG *mg = (PC_MG *)pc->data;
1280: PC_MG_Levels **mglevels = mg->levels;
1281: PetscInt lev, N;
1282: PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0;
1283: MatInfo info;
1285: PetscFunctionBegin;
1289: if (!pc->setupcalled) {
1290: if (gc) *gc = 0;
1291: if (oc) *oc = 0;
1292: PetscFunctionReturn(PETSC_SUCCESS);
1293: }
1294: PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
1295: for (lev = 0; lev < mg->nlevels; lev++) {
1296: Mat dB;
1297: PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
1298: PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
1299: PetscCall(MatGetSize(dB, &N, NULL));
1300: sgc += N;
1301: soc += info.nz_used;
1302: if (lev == mg->nlevels - 1) {
1303: nnz0 = info.nz_used;
1304: n0 = N;
1305: }
1306: }
1307: PetscCheck(n0 > 0 && gc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of grid points on the finest level is not available");
1308: *gc = (PetscReal)(sgc / n0);
1309: if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0);
1310: PetscFunctionReturn(PETSC_SUCCESS);
1311: }
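/*
   Usage sketch (illustrative addition, not part of the original source): report the complexity of
   an already set up hierarchy. Both ratios are 1.0 for a single-level hierarchy and grow as
   coarser levels are added.

     PetscReal gc, oc;

     PetscCall(PCSetUp(pc));
     PetscCall(PCMGGetGridComplexity(pc, &gc, &oc));
     PetscCall(PetscPrintf(PetscObjectComm((PetscObject)pc), "grid complexity %g, operator complexity %g\n", (double)gc, (double)oc));
*/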
1313: /*@
1314: PCMGSetType - Determines the form of multigrid to use:
1315: multiplicative, additive, full, or the Kaskade algorithm.
1317: Logically Collective
1319: Input Parameters:
1320: + pc - the preconditioner context
1321: - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`
1323: Options Database Key:
1324: . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade
1326: Level: advanced
1328: .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1329: @*/
1330: PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1331: {
1332: PC_MG *mg = (PC_MG *)pc->data;
1334: PetscFunctionBegin;
1337: mg->am = form;
1338: if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1339: else pc->ops->applyrichardson = NULL;
1340: PetscFunctionReturn(PETSC_SUCCESS);
1341: }
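/*
   Usage sketch (illustrative addition, not part of the original source): select full multigrid
   instead of the default multiplicative form; equivalent to the option -pc_mg_type full.

     PetscCall(PCMGSetType(pc, PC_MG_FULL));
*/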
1343: /*@
1344: PCMGGetType - Finds the form of multigrid the `PCMG` is using: multiplicative, additive, full, or the Kaskade algorithm.
1346: Not Collective
1348: Input Parameter:
1349: . pc - the preconditioner context
1351: Output Parameter:
1352: . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`
1354: Level: advanced
1356: .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1357: @*/
1358: PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1359: {
1360: PC_MG *mg = (PC_MG *)pc->data;
1362: PetscFunctionBegin;
1364: *type = mg->am;
1365: PetscFunctionReturn(PETSC_SUCCESS);
1366: }
1368: /*@
1369: PCMGSetCycleType - Sets the type of cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more
1370: complicated cycling.
1372: Logically Collective
1374: Input Parameters:
1375: + pc - the multigrid context
1376: - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`
1378: Options Database Key:
1379: . -pc_mg_cycle_type <v,w> - provide the cycle desired
1381: Level: advanced
1383: .seealso: `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1384: @*/
1385: PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1386: {
1387: PC_MG *mg = (PC_MG *)pc->data;
1388: PC_MG_Levels **mglevels = mg->levels;
1389: PetscInt i, levels;
1391: PetscFunctionBegin;
1394: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1395: levels = mglevels[0]->levels;
1396: for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1397: PetscFunctionReturn(PETSC_SUCCESS);
1398: }
1400: /*@
1401: PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1402: of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`
1404: Logically Collective
1406: Input Parameters:
1407: + pc - the multigrid context
1408: - n - number of cycles (default is 1)
1410: Options Database Key:
1411: . -pc_mg_multiplicative_cycles n - set the number of cycles
1413: Level: advanced
1415: Note:
1416: This is not associated with setting a V or W cycle; that is set with `PCMGSetCycleType()`
1418: .seealso: `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1419: @*/
1420: PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1421: {
1422: PC_MG *mg = (PC_MG *)pc->data;
1424: PetscFunctionBegin;
1427: mg->cyclesperpcapply = n;
1428: PetscFunctionReturn(PETSC_SUCCESS);
1429: }
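/*
   Usage sketch (illustrative addition, not part of the original source): ask for W cycles on
   every level and apply two such cycles per preconditioner application; equivalent to
   -pc_mg_cycle_type w -pc_mg_multiplicative_cycles 2. PCMGSetLevels() must have been called first.

     PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_W));
     PetscCall(PCMGMultiplicativeSetCycles(pc, 2));
*/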
1431: PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1432: {
1433: PC_MG *mg = (PC_MG *)pc->data;
1435: PetscFunctionBegin;
1436: mg->galerkin = use;
1437: PetscFunctionReturn(PETSC_SUCCESS);
1438: }
1440: /*@
1441: PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
1442: finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i
1444: Logically Collective
1446: Input Parameters:
1447: + pc - the multigrid context
1448: - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`
1450: Options Database Key:
1451: . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process
1453: Level: intermediate
1455: Note:
1456: Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
1457: use the `PCMG` construction of the coarser grids.
1459: .seealso: `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
1460: @*/
1461: PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
1462: {
1463: PetscFunctionBegin;
1465: PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
1466: PetscFunctionReturn(PETSC_SUCCESS);
1467: }
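/*
   Usage sketch (illustrative addition, not part of the original source): request that only the
   coarse preconditioning matrices (Pmat) be formed by the Galerkin process from the finest level;
   equivalent to the option -pc_mg_galerkin pmat.

     PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
*/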
1469: /*@
1470: PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i
1472: Not Collective
1474: Input Parameter:
1475: . pc - the multigrid context
1477: Output Parameter:
1478: . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`
1480: Level: intermediate
1482: .seealso: `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1483: @*/
1484: PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1485: {
1486: PC_MG *mg = (PC_MG *)pc->data;
1488: PetscFunctionBegin;
1490: *galerkin = mg->galerkin;
1491: PetscFunctionReturn(PETSC_SUCCESS);
1492: }
1494: PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1495: {
1496: PC_MG *mg = (PC_MG *)pc->data;
1498: PetscFunctionBegin;
1499: mg->adaptInterpolation = adapt;
1500: PetscFunctionReturn(PETSC_SUCCESS);
1501: }
1503: PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1504: {
1505: PC_MG *mg = (PC_MG *)pc->data;
1507: PetscFunctionBegin;
1508: *adapt = mg->adaptInterpolation;
1509: PetscFunctionReturn(PETSC_SUCCESS);
1510: }
1512: PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1513: {
1514: PC_MG *mg = (PC_MG *)pc->data;
1516: PetscFunctionBegin;
1517: mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1518: mg->coarseSpaceType = ctype;
1519: PetscFunctionReturn(PETSC_SUCCESS);
1520: }
1522: PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1523: {
1524: PC_MG *mg = (PC_MG *)pc->data;
1526: PetscFunctionBegin;
1527: *ctype = mg->coarseSpaceType;
1528: PetscFunctionReturn(PETSC_SUCCESS);
1529: }
1531: PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1532: {
1533: PC_MG *mg = (PC_MG *)pc->data;
1535: PetscFunctionBegin;
1536: mg->compatibleRelaxation = cr;
1537: PetscFunctionReturn(PETSC_SUCCESS);
1538: }
1540: PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1541: {
1542: PC_MG *mg = (PC_MG *)pc->data;
1544: PetscFunctionBegin;
1545: *cr = mg->compatibleRelaxation;
1546: PetscFunctionReturn(PETSC_SUCCESS);
1547: }
1549: /*@C
1550: PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.
1552: Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1554: Logically Collective
1556: Input Parameters:
1557: + pc - the multigrid context
1558: - ctype - the type of coarse space
1560: Options Database Keys:
1561: + -pc_mg_adapt_interp_n <int> - The number of modes to use
1562: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw
1564: Level: intermediate
1566: .seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1567: @*/
1568: PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
1569: {
1570: PetscFunctionBegin;
1573: PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
1574: PetscFunctionReturn(PETSC_SUCCESS);
1575: }
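/*
   Usage sketch (illustrative addition, not part of the original source): select an adaptive
   coarse space, which also turns on adaptation of the interpolation. The enum value below is
   assumed to be the one corresponding to the "harmonic" option listed above; the same effect is
   available from the command line with -pc_mg_adapt_interp_coarse_space harmonic.

     PetscCall(PCMGSetAdaptCoarseSpaceType(pc, PCMG_ADAPT_HARMONIC)); // enum name assumed
*/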
1577: /*@C
1578: PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.
1580: Not Collective
1582: Input Parameter:
1583: . pc - the multigrid context
1585: Output Parameter:
1586: . ctype - the type of coarse space
1588: Level: intermediate
1590: .seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1591: @*/
1592: PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
1593: {
1594: PetscFunctionBegin;
1597: PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
1598: PetscFunctionReturn(PETSC_SUCCESS);
1599: }
1601: /* MATT: REMOVE? */
1602: /*@
1603: PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1605: Logically Collective
1607: Input Parameters:
1608: + pc - the multigrid context
1609: - adapt - flag for adaptation of the interpolator
1611: Options Database Keys:
1612: + -pc_mg_adapt_interp - Turn on adaptation
1613: . -pc_mg_adapt_interp_n <int> - The number of modes to use, should be divisible by dimension
1614: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector
1616: Level: intermediate
1618: .seealso: `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1619: @*/
1620: PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
1621: {
1622: PetscFunctionBegin;
1624: PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
1625: PetscFunctionReturn(PETSC_SUCCESS);
1626: }
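/*
   Usage sketch (illustrative addition, not part of the original source): enable adaptation of the
   interpolator and read the flag back; equivalent to the option -pc_mg_adapt_interp.

     PetscBool adapt;

     PetscCall(PCMGSetAdaptInterpolation(pc, PETSC_TRUE));
     PetscCall(PCMGGetAdaptInterpolation(pc, &adapt));  // adapt == PETSC_TRUE
*/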
1628: /*@
1629: PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
1630: and thus accurately interpolated.
1632: Not Collective
1634: Input Parameter:
1635: . pc - the multigrid context
1637: Output Parameter:
1638: . adapt - flag for adaptation of the interpolator
1640: Level: intermediate
1642: .seealso: `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1643: @*/
1644: PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
1645: {
1646: PetscFunctionBegin;
1649: PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
1650: PetscFunctionReturn(PETSC_SUCCESS);
1651: }
1653: /*@
1654: PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.
1656: Logically Collective
1658: Input Parameters:
1659: + pc - the multigrid context
1660: - cr - flag for compatible relaxation
1662: Options Database Key:
1663: . -pc_mg_adapt_cr - Turn on compatible relaxation
1665: Level: intermediate
1667: .seealso: `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1668: @*/
1669: PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
1670: {
1671: PetscFunctionBegin;
1673: PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
1674: PetscFunctionReturn(PETSC_SUCCESS);
1675: }
1677: /*@
1678: PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.
1680: Not Collective
1682: Input Parameter:
1683: . pc - the multigrid context
1685: Output Parameter:
1686: . cr - flag for compatible relaxation
1688: Level: intermediate
1690: .seealso: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1691: @*/
1692: PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
1693: {
1694: PetscFunctionBegin;
1697: PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
1698: PetscFunctionReturn(PETSC_SUCCESS);
1699: }
1701: /*@
1702: PCMGSetNumberSmooth - Sets the number of pre- and post-smoothing steps to use
1703: on all levels. Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1704: pre- and post-smoothing steps.
1706: Logically Collective
1708: Input Parameters:
1709: + mg - the multigrid context
1710: - n - the number of smoothing steps
1712: Options Database Key:
1713: . -mg_levels_ksp_max_it <n> - Sets the number of pre- and post-smoothing steps
1715: Level: advanced
1717: Note:
1718: This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1720: .seealso: `PCMG`, `PCMGSetDistinctSmoothUp()`
1721: @*/
1722: PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1723: {
1724: PC_MG *mg = (PC_MG *)pc->data;
1725: PC_MG_Levels **mglevels = mg->levels;
1726: PetscInt i, levels;
1728: PetscFunctionBegin;
1731: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1732: levels = mglevels[0]->levels;
1734: for (i = 1; i < levels; i++) {
1735: PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n));
1736: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n));
1737: mg->default_smoothu = n;
1738: mg->default_smoothd = n;
1739: }
1740: PetscFunctionReturn(PETSC_SUCCESS);
1741: }
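/*
   Usage sketch (illustrative addition, not part of the original source): use three pre- and
   post-smoothing sweeps on every level except the coarsest; equivalent to the option
   -mg_levels_ksp_max_it 3. PCMGSetLevels() must have been called first.

     PetscCall(PCMGSetNumberSmooth(pc, 3));
*/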
1743: /*@
1744: PCMGSetDistinctSmoothUp - Sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1745: and appends `up_` to the options prefix of each up smoother
1747: Logically Collective
1749: Input Parameter:
1750: . pc - the preconditioner context
1752: Options Database Key:
1753: . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects
1755: Level: advanced
1757: Note:
1758: This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1760: .seealso: `PCMG`, `PCMGSetNumberSmooth()`
1761: @*/
1762: PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1763: {
1764: PC_MG *mg = (PC_MG *)pc->data;
1765: PC_MG_Levels **mglevels = mg->levels;
1766: PetscInt i, levels;
1767: KSP subksp;
1769: PetscFunctionBegin;
1771: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1772: levels = mglevels[0]->levels;
1774: for (i = 1; i < levels; i++) {
1775: const char *prefix = NULL;
1776: /* make sure smoother up and down are different */
1777: PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1778: PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1779: PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1780: PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1781: }
1782: PetscFunctionReturn(PETSC_SUCCESS);
1783: }
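/*
   Usage sketch (illustrative addition, not part of the original source): split the up and down
   smoothers so they can be configured independently from the options database. The option
   prefixes shown are the typical ones (e.g. level 1 would be controlled by
   -mg_levels_1_ksp_type and, after this call, -mg_levels_1_up_ksp_type); PCMGSetLevels() must
   have been called first.

     PetscCall(PCMGSetDistinctSmoothUp(pc));
     PetscCall(PCSetFromOptions(pc));
*/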
1785: /* No new matrices are created; the returned interpolation matrices are references to the original ones */
1786: PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1787: {
1788: PC_MG *mg = (PC_MG *)pc->data;
1789: PC_MG_Levels **mglevels = mg->levels;
1790: Mat *mat;
1791: PetscInt l;
1793: PetscFunctionBegin;
1794: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1795: PetscCall(PetscMalloc1(mg->nlevels, &mat));
1796: for (l = 1; l < mg->nlevels; l++) {
1797: mat[l - 1] = mglevels[l]->interpolate;
1798: PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1799: }
1800: *num_levels = mg->nlevels;
1801: *interpolations = mat;
1802: PetscFunctionReturn(PETSC_SUCCESS);
1803: }
1805: /* No new matrices are created; the returned coarse operator matrices are references to the original ones */
1806: PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1807: {
1808: PC_MG *mg = (PC_MG *)pc->data;
1809: PC_MG_Levels **mglevels = mg->levels;
1810: PetscInt l;
1811: Mat *mat;
1813: PetscFunctionBegin;
1814: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1815: PetscCall(PetscMalloc1(mg->nlevels, &mat));
1816: for (l = 0; l < mg->nlevels - 1; l++) {
1817: PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &(mat[l])));
1818: PetscCall(PetscObjectReference((PetscObject)mat[l]));
1819: }
1820: *num_levels = mg->nlevels;
1821: *coarseOperators = mat;
1822: PetscFunctionReturn(PETSC_SUCCESS);
1823: }
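/*
   Usage sketch (illustrative addition, not part of the original source): retrieve the
   interpolations and coarse operators of a set up PCMG through the generic PC interface. Each
   entry is only a new reference, so the caller must destroy the matrices and free the arrays.

     PetscInt nl, i;
     Mat     *P, *A;

     PetscCall(PCGetInterpolations(pc, &nl, &P));   // P[0..nl-2] are filled
     PetscCall(PCGetCoarseOperators(pc, &nl, &A));  // A[0..nl-2] are filled
     for (i = 0; i < nl - 1; i++) {
       PetscCall(MatDestroy(&P[i]));
       PetscCall(MatDestroy(&A[i]));
     }
     PetscCall(PetscFree(P));
     PetscCall(PetscFree(A));
*/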
1825: /*@C
1826: PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.
1828: Not Collective
1830: Input Parameters:
1831: + name - name of the constructor
1832: - function - constructor routine
1834: Calling sequence of `function`:
1835: $ PetscErrorCode my_csp(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
1836: + pc - The `PC` object
1837: . l - The multigrid level, 0 is the coarse level
1838: . dm - The `DM` for this level
1839: . smooth - The level smoother
1840: . Nc - The size of the coarse space
1841: . initGuess - Basis for an initial guess for the space
1842: - coarseSp - A basis for the computed coarse space
1844: Level: advanced
1846: Developer Note:
1847: How come this is not used by `PCGAMG`?
1849: .seealso: `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
1850: @*/
1851: PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *))
1852: {
1853: PetscFunctionBegin;
1854: PetscCall(PCInitializePackage());
1855: PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
1856: PetscFunctionReturn(PETSC_SUCCESS);
1857: }
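/*
   Registration sketch (illustrative addition, not part of the original source): a user-provided
   constructor with the calling sequence documented above, registered under the made-up name
   "mycoarse" so it can be looked up later with PCMGGetCoarseSpaceConstructor("mycoarse", ...).

     static PetscErrorCode MyCoarseSpace(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
     {
       PetscFunctionBegin;
       *coarseSp = NULL; // fill in a basis (Nc columns) for the desired coarse space here
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     ...
     PetscCall(PCMGRegisterCoarseSpaceConstructor("mycoarse", MyCoarseSpace));
*/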
1859: /*@C
1860: PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.
1862: Not Collective
1864: Input Parameter:
1865: . name - name of the constructor
1867: Output Parameter:
1868: . function - constructor routine
1870: Level: advanced
1872: .seealso: `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
1873: @*/
1874: PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *))
1875: {
1876: PetscFunctionBegin;
1877: PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
1878: PetscFunctionReturn(PETSC_SUCCESS);
1879: }
1881: /*MC
1882: PCMG - Use multigrid preconditioning. This preconditioner requires that you provide additional
1883: information about the coarser grid matrices and restriction/interpolation operators.
1885: Options Database Keys:
1886: + -pc_mg_levels <nlevels> - number of levels including finest
1887: . -pc_mg_cycle_type <v,w> - provide the cycle desired
1888: . -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1889: . -pc_mg_log - log information about time spent on each level of the solver
1890: . -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1891: . -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
1892: . -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
1893: . -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
1894: to the Socket viewer for reading from MATLAB.
1895: - -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
1896: to the binary output file called binaryoutput
1898: Level: intermediate
1900: Notes:
1901: If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method
1903: When run with a single level, the smoother options are used on that level, NOT the coarse grid solver options
1905: When run with `KSPRICHARDSON` the convergence test changes slightly when a monitor is turned on, so the iteration count may change slightly. This
1906: is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1907: (because the residual has just been computed for the multigrid algorithm and is hence available for free), while with monitoring the
1908: residual is computed at the end of each cycle.
1910: .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
1911: `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1912: `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1913: `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1914: `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1915: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1916: M*/
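/*
   Overall usage sketch (illustrative addition, not part of the original source): a two-level
   multigrid preconditioner driven through KSP, assuming the matrices Amat/Pmat, the interpolation
   matrix P from the coarse grid to the fine grid, and the vectors b, x already exist.

     KSP ksp;
     PC  pc;

     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, Amat, Pmat));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCMG));
     PetscCall(PCMGSetLevels(pc, 2, NULL));
     PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH)); // coarse operators by the Galerkin product
     PetscCall(PCMGSetInterpolation(pc, 1, P));           // P maps level 0 (coarse) to level 1 (fine)
     PetscCall(KSPSetFromOptions(ksp));                   // smoothers configurable via -mg_levels_* options
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/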
1918: PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
1919: {
1920: PC_MG *mg;
1922: PetscFunctionBegin;
1923: PetscCall(PetscNew(&mg));
1924: pc->data = mg;
1925: mg->nlevels = -1;
1926: mg->am = PC_MG_MULTIPLICATIVE;
1927: mg->galerkin = PC_MG_GALERKIN_NONE;
1928: mg->adaptInterpolation = PETSC_FALSE;
1929: mg->Nc = -1;
1930: mg->eigenvalue = -1;
1932: pc->useAmat = PETSC_TRUE;
1934: pc->ops->apply = PCApply_MG;
1935: pc->ops->applytranspose = PCApplyTranspose_MG;
1936: pc->ops->matapply = PCMatApply_MG;
1937: pc->ops->setup = PCSetUp_MG;
1938: pc->ops->reset = PCReset_MG;
1939: pc->ops->destroy = PCDestroy_MG;
1940: pc->ops->setfromoptions = PCSetFromOptions_MG;
1941: pc->ops->view = PCView_MG;
1943: PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
1944: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
1945: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
1946: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
1947: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
1948: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
1949: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
1950: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
1951: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
1952: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
1953: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
1954: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
1955: PetscFunctionReturn(PETSC_SUCCESS);
1956: }