Actual source code: telescope_coarsedm.c
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petsc/private/dmimpl.h>
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>

#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
                               " title = {Extreme-Scale Multigrid Components within PETSc},\n"
                               " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
                               " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
                               " series = {PASC '16},\n"
                               " isbn = {978-1-4503-4126-4},\n"
                               " location = {Lausanne, Switzerland},\n"
                               " pages = {5:1--5:12},\n"
                               " articleno = {5},\n"
                               " numpages = {12},\n"
                               " url = {https://doi.acm.org/10.1145/2929908.2929913},\n"
                               " doi = {10.1145/2929908.2929913},\n"
                               " acmid = {2929913},\n"
                               " publisher = {ACM},\n"
                               " address = {New York, NY, USA},\n"
                               " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
                               " year = {2016}\n"
                               "}\n";

typedef struct {
  DM  dm_fine, dm_coarse; /* these DMs should be topologically identical but use different communicators */
  Mat permutation;
  Vec xp;
  PetscErrorCode (*fp_dm_field_scatter)(DM, Vec, ScatterMode, DM, Vec);
  PetscErrorCode (*fp_dm_state_scatter)(DM, ScatterMode, DM);
  void *dmksp_context_determined;
  void *dmksp_context_user;
} PC_Telescope_CoarseDMCtx;
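
/*
   A minimal sketch (not part of this file) of how the members above are expected to be
   populated by a user before PCSetUp(): the coarse DM is attached to the fine DM, and the
   two scatter callbacks are composed on the fine DM under the names queried in
   PCTelescopeSetUp_CoarseDM() below. UserFieldScatter and UserStateScatter are hypothetical
   user functions; their signatures must match fp_dm_field_scatter/fp_dm_state_scatter.

     static PetscErrorCode UserFieldScatter(DM dm_fine, Vec x, ScatterMode mode, DM dm_coarse, Vec xc)
     {
       PetscFunctionBegin;
       // move field data between the communicators of dm_fine and dm_coarse;
       // xc may be NULL on ranks which are not members of the sub-communicator
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     static PetscErrorCode UserStateScatter(DM dm_fine, ScatterMode mode, DM dm_coarse)
     {
       PetscFunctionBegin;
       // move any auxiliary state (e.g. coefficients) needed to assemble the coarse operator
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(DMSetCoarseDM(dm_fine, dm_coarse));
     PetscCall(PetscObjectComposeFunction((PetscObject)dm_fine, "PCTelescopeFieldScatter", UserFieldScatter));
     PetscCall(PetscObjectComposeFunction((PetscObject)dm_fine, "PCTelescopeStateScatter", UserStateScatter));
*/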

PetscErrorCode PCTelescopeSetUp_scatters_CoarseDM(PC pc, PC_Telescope sred, PC_Telescope_CoarseDMCtx *ctx)
{
  Vec        xred, yred, xtmp, x, xp;
  VecScatter scatter;
  IS         isin;
  Mat        B;
  PetscInt   m, bs, st, ed;
  MPI_Comm   comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(VecDuplicate(x, &xp));
  m    = 0;
  xred = NULL;
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(DMCreateGlobalVector(ctx->dm_coarse, &xred));
    PetscCall(VecDuplicate(xred, &yred));
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, ed - st, st, 1, &isin));
    PetscCall(VecGetLocalSize(xred, &m));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));
  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, ((PetscObject)x)->type_name));
  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
  sred->xred    = xred;
  sred->yred    = yred;
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xtmp    = xtmp;
  ctx->xp       = xp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
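
/*
   What the scatter built above provides, assuming (for illustration) a fine Vec
   distributed over 4 ranks with 2 active ranks on the sub-communicator:

     x    (fine layout)  : [ r0 | r1 | r2 | r3 ]
     xtmp (coarse layout): [ r0' | r1' | (empty) | (empty) ]

   xtmp lives on the parent communicator but has local size 0 on inactive ranks, and the
   stride IS selects, for each active rank, the contiguous block of the fine vector that
   matches its ownership range in the coarse DM's global vector.
*/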

PetscErrorCode PCTelescopeSetUp_CoarseDM(PC pc, PC_Telescope sred)
{
  PC_Telescope_CoarseDMCtx *ctx;
  DM                        dm, dm_coarse = NULL;
  MPI_Comm                  comm;
  PetscBool                 has_perm, has_kspcomputeoperators, using_kspcomputeoperators;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (CoarseDM)\n"));
  PetscCall(PetscNew(&ctx));
  sred->dm_ctx = (void *)ctx;

  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCall(PCGetDM(pc, &dm));
  PetscCall(DMGetCoarseDM(dm, &dm_coarse));
  ctx->dm_fine   = dm;
  ctx->dm_coarse = dm_coarse;

  /* attach coarse dm to ksp on sub communicator */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetDM(sred->ksp, ctx->dm_coarse));
    if (sred->ignore_kspcomputeoperators) PetscCall(KSPSetDMActive(sred->ksp, PETSC_FALSE));
  }

  /* check if there is a method to provide a permutation */
  has_perm                  = PETSC_FALSE;
  has_kspcomputeoperators   = PETSC_FALSE;
  using_kspcomputeoperators = PETSC_FALSE;

  /* if no permutation is provided, we must rely on KSPSetComputeOperators */
  {
    PetscErrorCode (*dmfine_kspfunc)(KSP, Mat, Mat, void *) = NULL;
    void *dmfine_kspctx = NULL, *dmcoarse_kspctx = NULL;
    void *dmfine_appctx = NULL, *dmcoarse_appctx = NULL;
    void *dmfine_shellctx = NULL, *dmcoarse_shellctx = NULL;

    PetscCall(DMKSPGetComputeOperators(dm, &dmfine_kspfunc, &dmfine_kspctx));
    if (dmfine_kspfunc) has_kspcomputeoperators = PETSC_TRUE;

    PetscCall(DMGetApplicationContext(ctx->dm_fine, &dmfine_appctx));
    PetscCall(DMShellGetContext(ctx->dm_fine, &dmfine_shellctx));

    /* need to define dmcoarse_kspctx */
    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {
      PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators fetched from parent DM\n"));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(DMGetApplicationContext(ctx->dm_coarse, &dmcoarse_appctx));
        PetscCall(DMShellGetContext(ctx->dm_coarse, &dmcoarse_shellctx));
      }

      /* Assume that if the fine operator didn't require any context, neither will the coarse */
      if (!dmfine_kspctx) {
        dmcoarse_kspctx = NULL;
        PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators using NULL context\n"));
      } else {
        PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators detected non-NULL context from parent DM\n"));
        if (PCTelescope_isActiveRank(sred)) {
          if (dmfine_kspctx == dmfine_appctx) {
            dmcoarse_kspctx = dmcoarse_appctx;
            PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators using context from DM->ApplicationContext\n"));
            PetscCheck(dmcoarse_kspctx, PETSC_COMM_SELF, PETSC_ERR_USER, "Non-NULL dmfine->kspctx == dmfine->appctx, but dmcoarse->appctx is NULL. Likely this is an error");
          } else if (dmfine_kspctx == dmfine_shellctx) {
            dmcoarse_kspctx = dmcoarse_shellctx;
            PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators using context from DMShell->Context\n"));
            PetscCheck(dmcoarse_kspctx, PETSC_COMM_SELF, PETSC_ERR_USER, "Non-NULL dmfine->kspctx == dmfine.shell->ctx, but dmcoarse.shell->ctx is NULL. Likely this is an error");
          }
          ctx->dmksp_context_determined = dmcoarse_kspctx;

          /* look for user provided method to fetch the context */
          {
            PetscErrorCode (*fp_get_coarsedm_context)(DM, void **) = NULL;
            void *dmcoarse_context_user = NULL;
            char  dmcoarse_method[PETSC_MAX_PATH_LEN];

            PetscCall(PetscSNPrintf(dmcoarse_method, sizeof(dmcoarse_method), "PCTelescopeGetCoarseDMKSPContext"));
            PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_coarse, dmcoarse_method, &fp_get_coarsedm_context));
            if (fp_get_coarsedm_context) {
              PetscCall(PetscInfo(pc, "PCTelescope: Found composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n"));
              PetscCall(fp_get_coarsedm_context(ctx->dm_coarse, &dmcoarse_context_user));
              ctx->dmksp_context_user = dmcoarse_context_user;
              dmcoarse_kspctx         = dmcoarse_context_user;
            } else {
              PetscCall(PetscInfo(pc, "PCTelescope: Failed to find composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n"));
            }
          }

          if (!dmcoarse_kspctx) {
            PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators failed to determine the context to use on sub-communicator\n"));
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot determine which context to use for KSPSetComputeOperators() on the sub-communicator");
          }
        }
      }
    }

    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {
      using_kspcomputeoperators = PETSC_TRUE;

      if (PCTelescope_isActiveRank(sred)) {
        /* sub ksp inherits dmksp_func and context provided by user */
        PetscCall(KSPSetComputeOperators(sred->ksp, dmfine_kspfunc, dmcoarse_kspctx));
        /* PetscCall(PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)ctx->dmrepart)); */
        PetscCall(KSPSetDMActive(sred->ksp, PETSC_TRUE));
      }
    }
  }

  PetscCheck(has_perm || !has_kspcomputeoperators || using_kspcomputeoperators, comm, PETSC_ERR_SUP, "No method to permute an operator was found on the parent DM. A method for KSPSetComputeOperators() was provided but it was requested to be ignored. Telescope setup cannot proceed");
  PetscCheck(has_perm || has_kspcomputeoperators, comm, PETSC_ERR_SUP, "No method to permute an operator was found on the parent DM. No method for KSPSetComputeOperators() was provided. Telescope setup cannot proceed");

  {
    char dmfine_method[PETSC_MAX_PATH_LEN];

    PetscCall(PetscSNPrintf(dmfine_method, sizeof(dmfine_method), "PCTelescopeFieldScatter"));
    PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_fine, dmfine_method, &ctx->fp_dm_field_scatter));

    PetscCall(PetscSNPrintf(dmfine_method, sizeof(dmfine_method), "PCTelescopeStateScatter"));
    PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_fine, dmfine_method, &ctx->fp_dm_state_scatter));
  }

  if (ctx->fp_dm_state_scatter) {
    PetscCall(PetscInfo(pc, "PCTelescope: Found composed method PCTelescopeStateScatter from parent DM\n"));
  } else {
    PetscCall(PetscInfo(pc, "PCTelescope: Failed to find composed method PCTelescopeStateScatter from parent DM\n"));
  }

  if (ctx->fp_dm_field_scatter) {
    PetscCall(PetscInfo(pc, "PCTelescope: Found composed method PCTelescopeFieldScatter from parent DM\n"));
  } else {
    PetscCall(PetscInfo(pc, "PCTelescope: Failed to find composed method PCTelescopeFieldScatter from parent DM\n"));
    SETERRQ(comm, PETSC_ERR_SUP, "No method to scatter fields between the parent DM and coarse DM was found. Must call PetscObjectComposeFunction() with the parent DM. Telescope setup cannot proceed");
  }

  /* PetscCall(PCTelescopeSetUp_permutation_CoarseDM(pc,sred,ctx)); */
  PetscCall(PCTelescopeSetUp_scatters_CoarseDM(pc, sred, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
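
/*
   A minimal usage sketch (assumptions noted) for the setup path above when the coarse
   operator is to be assembled via KSPSetComputeOperators(). UserComputeOperators and
   UserGetCoarseKSPCtx are hypothetical user functions; the composed name
   "PCTelescopeGetCoarseDMKSPContext" is the one queried above. The composed method is only
   required when the fine-level compute-operators context is non-NULL and is neither the
   fine DM's application context nor its DMShell context.

     static PetscErrorCode UserGetCoarseKSPCtx(DM dm_coarse, void **kspctx)
     {
       PetscFunctionBegin;
       // return a context which is valid on the sub-communicator
       *kspctx = ...;
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(KSPSetComputeOperators(fine_ksp, UserComputeOperators, fine_ctx));
     PetscCall(PetscObjectComposeFunction((PetscObject)dm_coarse, "PCTelescopeGetCoarseDMKSPContext", UserGetCoarseKSPCtx));
*/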

PetscErrorCode PCApply_Telescope_CoarseDM(PC pc, Vec x, Vec y)
{
  PC_Telescope              sred = (PC_Telescope)pc->data;
  Vec                       xred, yred;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx  = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  xred = sred->xred;
  yred = sred->yred;

  PetscCall(PetscCitationsRegister(citation, &cited));

  if (ctx->fp_dm_state_scatter) PetscCall(ctx->fp_dm_state_scatter(ctx->dm_fine, SCATTER_FORWARD, ctx->dm_coarse));

  PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, x, SCATTER_FORWARD, ctx->dm_coarse, xred));

  /* solve */
  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSolve(sred->ksp, xred, yred));

  PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, y, SCATTER_REVERSE, ctx->dm_coarse, yred));
  PetscFunctionReturn(PETSC_SUCCESS);
}
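
/*
   Command-line options that exercise this apply routine (a sketch; assumes the coarse DM
   and the composed scatter methods have been set up as described above):

     -pc_type telescope -pc_telescope_use_coarse_dm -pc_telescope_reduction_factor 4

   Each application gathers the fine-level vector onto the coarse DM, solves with the KSP
   on the sub-communicator, and scatters the result back to the fine DM.
*/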

PetscErrorCode PCTelescopeSubNullSpaceCreate_CoarseDM(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
{
  PetscBool                 has_const;
  PetscInt                  k, n = 0;
  const Vec                *vecs;
  Vec                      *sub_vecs = NULL;
  MPI_Comm                  subcomm;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx     = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  subcomm = sred->subcomm;
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    /* create new vectors */
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries; sub_vecs is NULL on ranks which are not members of the sub-communicator */
  for (k = 0; k < n; k++) PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, vecs[k], SCATTER_FORWARD, ctx->dm_coarse, sub_vecs ? sub_vecs[k] : NULL));

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCTelescopeMatNullSpaceCreate_CoarseDM(PC pc, PC_Telescope sred, Mat sub_mat)
{
  Mat                       B;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  PetscCall(PCGetOperators(pc, NULL, &B));
  {
    MatNullSpace nullspace, sub_nullspace;
    PetscCall(MatGetNullSpace(B, &nullspace));
    if (nullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (CoarseDM)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_CoarseDM(pc, sred, nullspace, &sub_nullspace));

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nullspace->remove && !nullspace->rmctx) {
          PetscCall(MatNullSpaceSetFunction(sub_nullspace, nullspace->remove, context));
        } else if (nullspace->remove && nullspace->rmctx) {
          char dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM, void **) = NULL;

          PetscCall(PetscSNPrintf(dmcoarse_method, sizeof(dmcoarse_method), "PCTelescopeGetCoarseDMNullSpaceUserContext"));
          PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_coarse, dmcoarse_method, &fp_get_coarsedm_context));
          PetscCheck(fp_get_coarsedm_context, PETSC_COMM_SELF, PETSC_ERR_SUP, "Propagation of user null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"", dmcoarse_method);
          /* fetch the coarse-side context for the removal method */
          PetscCall(fp_get_coarsedm_context(ctx->dm_coarse, &context));
          PetscCall(MatNullSpaceSetFunction(sub_nullspace, nullspace->remove, context));
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nullspace));
      }
    }
  }
  {
    MatNullSpace nearnullspace, sub_nearnullspace;
    PetscCall(MatGetNearNullSpace(B, &nearnullspace));
    if (nearnullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (CoarseDM)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_CoarseDM(pc, sred, nearnullspace, &sub_nearnullspace));

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nearnullspace->remove && !nearnullspace->rmctx) {
          PetscCall(MatNullSpaceSetFunction(sub_nearnullspace, nearnullspace->remove, context));
        } else if (nearnullspace->remove && nearnullspace->rmctx) {
          char dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM, void **) = NULL;

          PetscCall(PetscSNPrintf(dmcoarse_method, sizeof(dmcoarse_method), "PCTelescopeGetCoarseDMNearNullSpaceUserContext"));
          PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_coarse, dmcoarse_method, &fp_get_coarsedm_context));
          PetscCheck(fp_get_coarsedm_context, PETSC_COMM_SELF, PETSC_ERR_SUP, "Propagation of user near null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"", dmcoarse_method);
          /* fetch the coarse-side context for the removal method */
          PetscCall(fp_get_coarsedm_context(ctx->dm_coarse, &context));
          PetscCall(MatNullSpaceSetFunction(sub_nearnullspace, nearnullspace->remove, context));
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
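
/*
   When a (near) nullspace carries a user removal function with a non-NULL context, that
   context cannot be reused directly on the sub-communicator, hence the composed methods
   queried above. A minimal sketch (UserGetNullSpaceCtx is a hypothetical user function):

     static PetscErrorCode UserGetNullSpaceCtx(DM dm_coarse, void **rmctx)
     {
       PetscFunctionBegin;
       // return a removal-method context valid on the sub-communicator
       *rmctx = ...;
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(PetscObjectComposeFunction((PetscObject)dm_coarse, "PCTelescopeGetCoarseDMNullSpaceUserContext", UserGetNullSpaceCtx));
     PetscCall(PetscObjectComposeFunction((PetscObject)dm_coarse, "PCTelescopeGetCoarseDMNearNullSpaceUserContext", UserGetNullSpaceCtx));
*/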

PetscErrorCode PCReset_Telescope_CoarseDM(PC pc)
{
  PC_Telescope              sred = (PC_Telescope)pc->data;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  ctx->dm_fine     = NULL; /* we never incremented the reference counts on the DMs, so simply NULL the pointers */
  ctx->dm_coarse   = NULL;
  ctx->permutation = NULL; /* this is fetched from the DM, so there is no need to destroy it here */
  PetscCall(VecDestroy(&ctx->xp));
  ctx->fp_dm_field_scatter      = NULL;
  ctx->fp_dm_state_scatter      = NULL;
  ctx->dmksp_context_determined = NULL;
  ctx->dmksp_context_user       = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCApplyRichardson_Telescope_CoarseDM(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_Telescope              sred = (PC_Telescope)pc->data;
  Vec                       yred = NULL;
  PetscBool                 default_init_guess_value = PETSC_FALSE;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx  = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  yred = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope_CoarseDM only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescopeCoarseDM: Scattering y for non-zero initial guess\n"));
    PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, y, SCATTER_FORWARD, ctx->dm_coarse, yred));
  }

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope_CoarseDM(pc, x, y));

  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}