/* fetidp.c — FETI-DP method implemented as a KSP on top of PCBDDC */
1: #include <petsc/private/kspimpl.h>
2: #include <petsc/private/pcbddcimpl.h>
3: #include <petsc/private/pcbddcprivateimpl.h>
4: #include <petscdm.h>
/* One-shot flags so each citation below is registered with
   PetscCitationsRegister() at most once per program run */
static PetscBool cited  = PETSC_FALSE;
static PetscBool cited2 = PETSC_FALSE;
/* BibTeX entries printed when the user runs with -citations:
   the PCBDDC/FETI-DP papers by Zampini */
static const char citation[] = "@article{ZampiniPCBDDC,\n"
                               "author = {Stefano Zampini},\n"
                               "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n"
                               "journal = {SIAM Journal on Scientific Computing},\n"
                               "volume = {38},\n"
                               "number = {5},\n"
                               "pages = {S282-S306},\n"
                               "year = {2016},\n"
                               "doi = {10.1137/15M1025785},\n"
                               "URL = {http://dx.doi.org/10.1137/15M1025785},\n"
                               "eprint = {http://dx.doi.org/10.1137/15M1025785}\n"
                               "}\n"
                               "@article{ZampiniDualPrimal,\n"
                               "author = {Stefano Zampini},\n"
                               "title = {{D}ual-{P}rimal methods for the cardiac {B}idomain model},\n"
                               "volume = {24},\n"
                               "number = {04},\n"
                               "pages = {667-696},\n"
                               "year = {2014},\n"
                               "doi = {10.1142/S0218202513500632},\n"
                               "URL = {https://www.worldscientific.com/doi/abs/10.1142/S0218202513500632},\n"
                               "eprint = {https://www.worldscientific.com/doi/pdf/10.1142/S0218202513500632}\n"
                               "}\n";
/* Citation for the saddle-point (Stokes) FETI-DP variant */
static const char citation2[] = "@article{li2013nonoverlapping,\n"
                                "title={A nonoverlapping domain decomposition method for incompressible Stokes equations with continuous pressures},\n"
                                "author={Li, Jing and Tu, Xuemin},\n"
                                "journal={SIAM Journal on Numerical Analysis},\n"
                                "volume={51},\n"
                                "number={2},\n"
                                "pages={1235--1253},\n"
                                "year={2013},\n"
                                "publisher={Society for Industrial and Applied Mathematics}\n"
                                "}\n";
42: /*
43: This file implements the FETI-DP method in PETSc as part of KSP.
44: */
/* Monitor relay context: lets monitors attached to the outer KSPFETIDP be
   invoked from the inner multiplier KSP (see KSPMonitor_FETIDP) */
typedef struct {
  KSP parentksp; /* the outer KSPFETIDP object */
} KSP_FETIDPMon;
/* Private data of the KSPFETIDP solver */
typedef struct {
  KSP            innerksp;        /* the KSP for the Lagrange multipliers */
  PC             innerbddc;       /* the inner BDDC object */
  PetscBool      fully_redundant; /* true for using a fully redundant set of multipliers */
  PetscBool      userbddc;        /* true if the user provided the PCBDDC object */
  PetscBool      saddlepoint;     /* support for saddle point problems */
  IS             pP;              /* index set for pressure variables */
  Vec            rhs_flip;        /* see KSPFETIDPSetUpOperators */
  KSP_FETIDPMon *monctx;          /* monitor context, used to pass user defined monitors
                                     in the physical space */

  PetscObjectState matstate;      /* these are needed just in the saddle point case */
  PetscObjectState matnnzstate;   /* where we are going to use MatZeroRows on pmat */
  PetscBool        statechanged;  /* set when the operator state changed (saddle point only) */
  PetscBool        check;         /* if true, run the FETI-DP operator consistency tests */
} KSP_FETIDP;
65: static PetscErrorCode KSPFETIDPSetPressureOperator_FETIDP(KSP ksp, Mat P)
66: {
67: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
69: PetscFunctionBegin;
70: if (P) fetidp->saddlepoint = PETSC_TRUE;
71: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_PPmat", (PetscObject)P));
72: PetscFunctionReturn(PETSC_SUCCESS);
73: }
75: /*@
  KSPFETIDPSetPressureOperator - Sets the operator used to set up the pressure preconditioner for the saddle point `KSPFETIDP` solver
78: Collective
80: Input Parameters:
81: + ksp - the `KSPFETIDP` solver
82: - P - the linear operator to be preconditioned, usually the mass matrix.
84: Level: advanced
86: Notes:
87: The operator can be either passed in a) monolithic global ordering, b) pressure-only global ordering
88: or c) interface pressure ordering (if `-ksp_fetidp_pressure_all false`).
89: In cases b) and c), the pressure ordering of dofs needs to satisfy
90: pid_1 < pid_2 iff gid_1 < gid_2
91: where pid_1 and pid_2 are two different pressure dof numbers and gid_1 and gid_2 the corresponding
92: id in the monolithic global ordering.
94: .seealso: [](ch_ksp), `KSPFETIDP`, `MATIS`, `PCBDDC`, `KSPFETIDPGetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`, `KSPSetOperators()`
95: @*/
96: PetscErrorCode KSPFETIDPSetPressureOperator(KSP ksp, Mat P)
97: {
98: PetscFunctionBegin;
101: PetscTryMethod(ksp, "KSPFETIDPSetPressureOperator_C", (KSP, Mat), (ksp, P));
102: PetscFunctionReturn(PETSC_SUCCESS);
103: }
105: static PetscErrorCode KSPFETIDPGetInnerKSP_FETIDP(KSP ksp, KSP *innerksp)
106: {
107: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
109: PetscFunctionBegin;
110: *innerksp = fetidp->innerksp;
111: PetscFunctionReturn(PETSC_SUCCESS);
112: }
114: /*@
115: KSPFETIDPGetInnerKSP - Gets the `KSP` object for the Lagrange multipliers from inside a `KSPFETIDP`
  Input Parameter:
. ksp - the `KSPFETIDP`

  Output Parameter:
. innerksp - the `KSP` for the multipliers
121: Level: advanced
123: .seealso: [](ch_ksp), `KSPFETIDP`, `MATIS`, `PCBDDC`, `KSPFETIDPSetInnerBDDC()`, `KSPFETIDPGetInnerBDDC()`
124: @*/
125: PetscErrorCode KSPFETIDPGetInnerKSP(KSP ksp, KSP *innerksp)
126: {
127: PetscFunctionBegin;
130: PetscUseMethod(ksp, "KSPFETIDPGetInnerKSP_C", (KSP, KSP *), (ksp, innerksp));
131: PetscFunctionReturn(PETSC_SUCCESS);
132: }
134: static PetscErrorCode KSPFETIDPGetInnerBDDC_FETIDP(KSP ksp, PC *pc)
135: {
136: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
138: PetscFunctionBegin;
139: *pc = fetidp->innerbddc;
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
143: /*@
144: KSPFETIDPGetInnerBDDC - Gets the `PCBDDC` preconditioner used to set up the `KSPFETIDP` matrix for the Lagrange multipliers
  Input Parameter:
. ksp - the `KSPFETIDP` Krylov solver

  Output Parameter:
. pc - the `PCBDDC` preconditioner
150: Level: advanced
152: .seealso: [](ch_ksp), `MATIS`, `PCBDDC`, `KSPFETIDPSetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`
153: @*/
154: PetscErrorCode KSPFETIDPGetInnerBDDC(KSP ksp, PC *pc)
155: {
156: PetscFunctionBegin;
159: PetscUseMethod(ksp, "KSPFETIDPGetInnerBDDC_C", (KSP, PC *), (ksp, pc));
160: PetscFunctionReturn(PETSC_SUCCESS);
161: }
/* Implementation of KSPFETIDPSetInnerBDDC(): replaces the inner BDDC PC
   with a user-provided one, taking a reference to it */
static PetscErrorCode KSPFETIDPSetInnerBDDC_FETIDP(KSP ksp, PC pc)
{
  KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;

  PetscFunctionBegin;
  /* reference pc BEFORE destroying the old PC: this keeps pc alive even
     in the corner case pc == fetidp->innerbddc */
  PetscCall(PetscObjectReference((PetscObject)pc));
  PetscCall(PCDestroy(&fetidp->innerbddc));
  fetidp->innerbddc = pc;
  /* remember the PC was supplied by the user (affects default option handling) */
  fetidp->userbddc = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
175: /*@
176: KSPFETIDPSetInnerBDDC - Provides the `PCBDDC` preconditioner used to set up the `KSPFETIDP` matrix for the Lagrange multipliers
178: Collective
180: Input Parameters:
181: + ksp - the `KSPFETIDP` Krylov solver
182: - pc - the `PCBDDC` preconditioner
184: Level: advanced
186: Note:
187: A `PC` is automatically created for the `KSPFETIDP` and can be accessed to change options with `KSPFETIDPGetInnerBDDC()` hence this routine is rarely needed
189: .seealso: [](ch_ksp), `MATIS`, `PCBDDC`, `KSPFETIDPGetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`
190: @*/
191: PetscErrorCode KSPFETIDPSetInnerBDDC(KSP ksp, PC pc)
192: {
193: PetscBool isbddc;
195: PetscFunctionBegin;
198: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBDDC, &isbddc));
199: PetscCheck(isbddc, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "KSPFETIDPSetInnerBDDC need a PCBDDC preconditioner");
200: PetscTryMethod(ksp, "KSPFETIDPSetInnerBDDC_C", (KSP, PC), (ksp, pc));
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: static PetscErrorCode KSPBuildSolution_FETIDP(KSP ksp, Vec v, Vec *V)
205: {
206: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
207: Mat F;
208: Vec Xl;
210: PetscFunctionBegin;
211: PetscCall(KSPGetOperators(fetidp->innerksp, &F, NULL));
212: PetscCall(KSPBuildSolution(fetidp->innerksp, NULL, &Xl));
213: if (v) {
214: PetscCall(PCBDDCMatFETIDPGetSolution(F, Xl, v));
215: *V = v;
216: } else {
217: PetscCall(PCBDDCMatFETIDPGetSolution(F, Xl, *V));
218: }
219: PetscFunctionReturn(PETSC_SUCCESS);
220: }
222: static PetscErrorCode KSPMonitor_FETIDP(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
223: {
224: KSP_FETIDPMon *monctx = (KSP_FETIDPMon *)ctx;
226: PetscFunctionBegin;
227: PetscCall(KSPMonitor(monctx->parentksp, it, rnorm));
228: PetscFunctionReturn(PETSC_SUCCESS);
229: }
231: static PetscErrorCode KSPComputeEigenvalues_FETIDP(KSP ksp, PetscInt nmax, PetscReal *r, PetscReal *c, PetscInt *neig)
232: {
233: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
235: PetscFunctionBegin;
236: PetscCall(KSPComputeEigenvalues(fetidp->innerksp, nmax, r, c, neig));
237: PetscFunctionReturn(PETSC_SUCCESS);
238: }
240: static PetscErrorCode KSPComputeExtremeSingularValues_FETIDP(KSP ksp, PetscReal *emax, PetscReal *emin)
241: {
242: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
244: PetscFunctionBegin;
245: PetscCall(KSPComputeExtremeSingularValues(fetidp->innerksp, emax, emin));
246: PetscFunctionReturn(PETSC_SUCCESS);
247: }
/* Runs a battery of consistency checks (tests A-F) on the assembled FETI-DP
   operators, printing results to an ASCII viewer. All reported values should
   be (numerically) zero. The tests verify the local-to-global numbering of
   the multiplier/pressure dofs and the algebraic identities of the jump (P_D)
   and average (E_D) operators from Mandel, Tezaur and Dohrmann (2005). */
static PetscErrorCode KSPFETIDPCheckOperators(KSP ksp, PetscViewer viewer)
{
  KSP_FETIDP     *fetidp = (KSP_FETIDP *)ksp->data;
  PC_BDDC        *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
  PC_IS          *pcis   = (PC_IS *)fetidp->innerbddc->data;
  Mat_IS         *matis  = (Mat_IS *)fetidp->innerbddc->pmat->data;
  Mat             F;
  FETIDPMat_ctx   fetidpmat_ctx;
  Vec             test_vec, test_vec_p = NULL, fetidp_global;
  IS              dirdofs, isvert;
  MPI_Comm        comm = PetscObjectComm((PetscObject)ksp);
  PetscScalar     sval, *array;
  PetscReal       val, rval;
  const PetscInt *vertex_indices;
  PetscInt        i, n_vertices;
  PetscBool       isascii;

  PetscFunctionBegin;
  PetscCheckSameComm(ksp, 1, viewer, 2);
  /* only ASCII viewers are supported for these diagnostics */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCheck(isascii, comm, PETSC_ERR_SUP, "Unsupported viewer");
  PetscCall(PetscViewerASCIIPrintf(viewer, "----------FETI-DP MAT --------------\n"));
  PetscCall(PetscViewerASCIIAddTab(viewer, 2));
  PetscCall(KSPGetOperators(fetidp->innerksp, &F, NULL));
  PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
  PetscCall(MatView(F, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscCall(PetscViewerASCIISubtractTab(viewer, 2));
  PetscCall(MatShellGetContext(F, &fetidpmat_ctx));
  PetscCall(PetscViewerASCIIPrintf(viewer, "----------FETI-DP TESTS--------------\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "All tests should return zero!\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "FETIDP MAT context in the "));
  if (fetidp->fully_redundant) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "fully redundant case for lagrange multipliers.\n"));
  } else {
    PetscCall(PetscViewerASCIIPrintf(viewer, "Non-fully redundant case for lagrange multiplier.\n"));
  }
  PetscCall(PetscViewerFlush(viewer));

  /* Get Vertices used to define the BDDC */
  PetscCall(PCBDDCGraphGetCandidatesIS(pcbddc->mat_graph, NULL, NULL, NULL, NULL, &isvert));
  PetscCall(ISGetLocalSize(isvert, &n_vertices));
  PetscCall(ISGetIndices(isvert, &vertex_indices));

  /******************************************************************/
  /* TEST A/B: Test numbering of global fetidp dofs                 */
  /******************************************************************/
  PetscCall(MatCreateVecs(F, &fetidp_global, NULL));
  PetscCall(VecDuplicate(fetidpmat_ctx->lambda_local, &test_vec));
  PetscCall(VecSet(fetidp_global, 1.0));
  PetscCall(VecSet(test_vec, 1.));
  /* scatter a global vector of ones back to the local multipliers: the
     result must match the local vector of ones if the numbering is right */
  PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  if (fetidpmat_ctx->l2g_p) { /* same check for the pressure block, if any */
    PetscCall(VecDuplicate(fetidpmat_ctx->vP, &test_vec_p));
    PetscCall(VecSet(test_vec_p, 1.));
    PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_p, fetidp_global, fetidpmat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_p, fetidp_global, fetidpmat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscCall(VecAXPY(test_vec, -1.0, fetidpmat_ctx->lambda_local));
  PetscCall(VecNorm(test_vec, NORM_INFINITY, &val));
  PetscCall(VecDestroy(&test_vec));
  PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
  PetscCall(PetscViewerASCIIPrintf(viewer, "A: CHECK glob to loc: % 1.14e\n", (double)rval));

  if (fetidpmat_ctx->l2g_p) {
    PetscCall(VecAXPY(test_vec_p, -1.0, fetidpmat_ctx->vP));
    PetscCall(VecNorm(test_vec_p, NORM_INFINITY, &val));
    PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
    PetscCall(PetscViewerASCIIPrintf(viewer, "A: CHECK glob to loc (p): % 1.14e\n", (double)rval));
  }

  if (fetidp->fully_redundant) {
    /* each multiplier is shared by exactly two subdomains in the fully
       redundant case, so adding 0.5 from each side must sum to n_lambda */
    PetscCall(VecSet(fetidp_global, 0.0));
    PetscCall(VecSet(fetidpmat_ctx->lambda_local, 0.5));
    PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecSum(fetidp_global, &sval));
    val = PetscRealPart(sval) - fetidpmat_ctx->n_lambda;
    PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
    PetscCall(PetscViewerASCIIPrintf(viewer, "B: CHECK loc to glob: % 1.14e\n", (double)rval));
  }

  if (fetidpmat_ctx->l2g_p) {
    /* counts of subdomains sharing each global dof */
    PetscCall(VecSet(pcis->vec1_N, 1.0));
    PetscCall(VecSet(pcis->vec1_global, 0.0));
    PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));

    /* subtract the sharing count at pressure dofs; a zero sum means the
       pressure local-to-global maps are consistent */
    PetscCall(VecSet(fetidp_global, 0.0));
    PetscCall(VecSet(fetidpmat_ctx->vP, -1.0));
    PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterBegin(fetidpmat_ctx->g2g_p, fetidp_global, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(fetidpmat_ctx->g2g_p, fetidp_global, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(fetidpmat_ctx->g2g_p, pcis->vec1_global, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(fetidpmat_ctx->g2g_p, pcis->vec1_global, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecSum(fetidp_global, &sval));
    val = PetscRealPart(sval);
    PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
    PetscCall(PetscViewerASCIIPrintf(viewer, "B: CHECK loc to glob (p): % 1.14e\n", (double)rval));
  }

  /******************************************************************/
  /* TEST C: It should hold B_delta*w=0, w\in\widehat{W}            */
  /* This is the meaning of the B matrix                            */
  /******************************************************************/

  /* build a continuous (conforming) random vector w */
  PetscCall(VecSetRandom(pcis->vec1_N, NULL));
  PetscCall(VecSet(pcis->vec1_global, 0.0));
  PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  /* Action of B_delta */
  PetscCall(MatMult(fetidpmat_ctx->B_delta, pcis->vec1_B, fetidpmat_ctx->lambda_local));
  PetscCall(VecSet(fetidp_global, 0.0));
  PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecNorm(fetidp_global, NORM_INFINITY, &val));
  PetscCall(PetscViewerASCIIPrintf(viewer, "C: CHECK infty norm of B_delta*w (w continuous): % 1.14e\n", (double)val));

  /******************************************************************/
  /* TEST D: It should hold E_Dw = w - P_Dw w\in\widetilde{W}       */
  /* E_D = R_D^TR                                                   */
  /* P_D = B_{D,delta}^T B_{delta}                                  */
  /* eq.44 Mandel Tezaur and Dohrmann 2005                          */
  /******************************************************************/

  /* compute a random vector in \widetilde{W} */
  PetscCall(VecSetRandom(pcis->vec1_N, NULL));
  /* set zero at vertices and essential dofs */
  PetscCall(VecGetArray(pcis->vec1_N, &array));
  for (i = 0; i < n_vertices; i++) array[vertex_indices[i]] = 0.0;
  PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirdofs));
  if (dirdofs) {
    const PetscInt *idxs;
    PetscInt        ndir;

    PetscCall(ISGetLocalSize(dirdofs, &ndir));
    PetscCall(ISGetIndices(dirdofs, &idxs));
    for (i = 0; i < ndir; i++) array[idxs[i]] = 0.0;
    PetscCall(ISRestoreIndices(dirdofs, &idxs));
  }
  PetscCall(VecRestoreArray(pcis->vec1_N, &array));
  /* store w for final comparison */
  PetscCall(VecDuplicate(pcis->vec1_B, &test_vec));
  PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, test_vec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, test_vec, INSERT_VALUES, SCATTER_FORWARD));

  /* Jump operator P_D : results stored in pcis->vec1_B */
  /* Action of B_delta */
  PetscCall(MatMult(fetidpmat_ctx->B_delta, test_vec, fetidpmat_ctx->lambda_local));
  PetscCall(VecSet(fetidp_global, 0.0));
  PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
  /* Action of B_Ddelta^T */
  PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMultTranspose(fetidpmat_ctx->B_Ddelta, fetidpmat_ctx->lambda_local, pcis->vec1_B));

  /* Average operator E_D : results stored in pcis->vec2_B */
  PetscCall(PCBDDCScalingExtension(fetidpmat_ctx->pc, test_vec, pcis->vec1_global));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec2_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec2_B, INSERT_VALUES, SCATTER_FORWARD));

  /* test E_D=I-P_D */
  PetscCall(VecAXPY(pcis->vec1_B, 1.0, pcis->vec2_B));
  PetscCall(VecAXPY(pcis->vec1_B, -1.0, test_vec));
  PetscCall(VecNorm(pcis->vec1_B, NORM_INFINITY, &val));
  PetscCall(VecDestroy(&test_vec));
  PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
  /* NOTE(review): this line intentionally prints the per-rank norm (val),
     prefixed by the rank, rather than the reduced rval — confirm intended */
  PetscCall(PetscViewerASCIIPrintf(viewer, "%d: CHECK infty norm of E_D + P_D - I: %1.14e\n", PetscGlobalRank, (double)val));

  /******************************************************************/
  /* TEST E: It should hold R_D^TP_Dw=0 w\in\widetilde{W}           */
  /* eq.48 Mandel Tezaur and Dohrmann 2005                          */
  /******************************************************************/

  PetscCall(VecSetRandom(pcis->vec1_N, NULL));
  /* set zero at vertices and essential dofs */
  PetscCall(VecGetArray(pcis->vec1_N, &array));
  for (i = 0; i < n_vertices; i++) array[vertex_indices[i]] = 0.0;
  if (dirdofs) {
    const PetscInt *idxs;
    PetscInt        ndir;

    PetscCall(ISGetLocalSize(dirdofs, &ndir));
    PetscCall(ISGetIndices(dirdofs, &idxs));
    for (i = 0; i < ndir; i++) array[idxs[i]] = 0.0;
    PetscCall(ISRestoreIndices(dirdofs, &idxs));
  }
  PetscCall(VecRestoreArray(pcis->vec1_N, &array));

  /* Jump operator P_D : results stored in pcis->vec1_B */

  PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  /* Action of B_delta */
  PetscCall(MatMult(fetidpmat_ctx->B_delta, pcis->vec1_B, fetidpmat_ctx->lambda_local));
  PetscCall(VecSet(fetidp_global, 0.0));
  PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
  /* Action of B_Ddelta^T */
  PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMultTranspose(fetidpmat_ctx->B_Ddelta, fetidpmat_ctx->lambda_local, pcis->vec1_B));
  /* scaling */
  PetscCall(PCBDDCScalingExtension(fetidpmat_ctx->pc, pcis->vec1_B, pcis->vec1_global));
  PetscCall(VecNorm(pcis->vec1_global, NORM_INFINITY, &val));
  PetscCall(PetscViewerASCIIPrintf(viewer, "E: CHECK infty norm of R^T_D P_D: % 1.14e\n", (double)val));

  if (!fetidp->fully_redundant) {
    /******************************************************************/
    /* TEST F: It should holds B_{delta}B^T_{D,delta}=I               */
    /* Corollary thm 14 Mandel Tezaur and Dohrmann 2005               */
    /******************************************************************/
    PetscCall(VecDuplicate(fetidp_global, &test_vec));
    PetscCall(VecSetRandom(fetidp_global, NULL));
    if (fetidpmat_ctx->l2g_p) { /* zero out the pressure block first */
      PetscCall(VecSet(fetidpmat_ctx->vP, 0.));
      PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
    }
    /* Action of B_Ddelta^T */
    PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(MatMultTranspose(fetidpmat_ctx->B_Ddelta, fetidpmat_ctx->lambda_local, pcis->vec1_B));
    /* Action of B_delta */
    PetscCall(MatMult(fetidpmat_ctx->B_delta, pcis->vec1_B, fetidpmat_ctx->lambda_local));
    PetscCall(VecSet(test_vec, 0.0));
    PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, test_vec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, test_vec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecAXPY(fetidp_global, -1., test_vec));
    PetscCall(VecNorm(fetidp_global, NORM_INFINITY, &val));
    /* NOTE(review): the "E:" label below likely should read "F:" to match
       the TEST F banner — confirm before changing the output string */
    PetscCall(PetscViewerASCIIPrintf(viewer, "E: CHECK infty norm of P^T_D - I: % 1.14e\n", (double)val));
    PetscCall(VecDestroy(&test_vec));
  }
  PetscCall(PetscViewerASCIIPrintf(viewer, "-------------------------------------\n"));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(VecDestroy(&test_vec_p));
  PetscCall(ISDestroy(&dirdofs));
  PetscCall(VecDestroy(&fetidp_global));
  PetscCall(ISRestoreIndices(isvert, &vertex_indices));
  PetscCall(PCBDDCGraphRestoreCandidatesIS(pcbddc->mat_graph, NULL, NULL, NULL, NULL, &isvert));
  PetscFunctionReturn(PETSC_SUCCESS);
}
499: static PetscErrorCode KSPFETIDPSetUpOperators(KSP ksp)
500: {
501: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
502: PC_BDDC *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
503: Mat A, Ap;
504: PetscInt fid = -1;
505: PetscMPIInt size;
506: PetscBool ismatis, pisz = PETSC_FALSE, allp = PETSC_FALSE, schp = PETSC_FALSE;
507: PetscBool flip = PETSC_FALSE; /* Usually, Stokes is written (B = -\int_\Omega \nabla \cdot u q)
508: | A B'| | v | = | f |
509: | B 0 | | p | = | g |
510: If -ksp_fetidp_saddlepoint_flip is true, the code assumes it is written as
511: | A B'| | v | = | f |
512: |-B 0 | | p | = |-g |
513: */
514: PetscObjectState matstate, matnnzstate;
516: PetscFunctionBegin;
517: PetscOptionsBegin(PetscObjectComm((PetscObject)ksp), ((PetscObject)ksp)->prefix, "FETI-DP options", "PC");
518: PetscCall(PetscOptionsInt("-ksp_fetidp_pressure_field", "Field id for pressures for saddle-point problems", NULL, fid, &fid, NULL));
519: PetscCall(PetscOptionsBool("-ksp_fetidp_pressure_all", "Use the whole pressure set instead of just that at the interface", NULL, allp, &allp, NULL));
520: PetscCall(PetscOptionsBool("-ksp_fetidp_saddlepoint_flip", "Flip the sign of the pressure-velocity (lower-left) block", NULL, flip, &flip, NULL));
521: PetscCall(PetscOptionsBool("-ksp_fetidp_pressure_schur", "Use a BDDC solver for pressure", NULL, schp, &schp, NULL));
522: PetscOptionsEnd();
524: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)ksp), &size));
525: fetidp->saddlepoint = (fid >= 0 ? PETSC_TRUE : fetidp->saddlepoint);
526: if (size == 1) fetidp->saddlepoint = PETSC_FALSE;
528: PetscCall(KSPGetOperators(ksp, &A, &Ap));
529: PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
530: PetscCheck(ismatis, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Amat should be of type MATIS");
532: /* Quiet return if the matrix states are unchanged.
533: Needed only for the saddle point case since it uses MatZeroRows
534: on a matrix that may not have changed */
535: PetscCall(PetscObjectStateGet((PetscObject)A, &matstate));
536: PetscCall(MatGetNonzeroState(A, &matnnzstate));
537: if (matstate == fetidp->matstate && matnnzstate == fetidp->matnnzstate) PetscFunctionReturn(PETSC_SUCCESS);
538: fetidp->matstate = matstate;
539: fetidp->matnnzstate = matnnzstate;
540: fetidp->statechanged = fetidp->saddlepoint;
542: /* see if we have some fields attached */
543: if (!pcbddc->n_ISForDofsLocal && !pcbddc->n_ISForDofs) {
544: DM dm;
545: PetscContainer c;
547: PetscCall(KSPGetDM(ksp, &dm));
548: PetscCall(PetscObjectQuery((PetscObject)A, "_convert_nest_lfields", (PetscObject *)&c));
549: if (dm) {
550: IS *fields;
551: PetscInt nf, i;
553: PetscCall(DMCreateFieldDecomposition(dm, &nf, NULL, &fields, NULL));
554: PetscCall(PCBDDCSetDofsSplitting(fetidp->innerbddc, nf, fields));
555: for (i = 0; i < nf; i++) PetscCall(ISDestroy(&fields[i]));
556: PetscCall(PetscFree(fields));
557: } else if (c) {
558: MatISLocalFields lf;
560: PetscCall(PetscContainerGetPointer(c, (void **)&lf));
561: PetscCall(PCBDDCSetDofsSplittingLocal(fetidp->innerbddc, lf->nr, lf->rf));
562: }
563: }
565: if (!fetidp->saddlepoint) {
566: PetscCall(PCSetOperators(fetidp->innerbddc, A, A));
567: } else {
568: Mat nA, lA, PPmat;
569: MatNullSpace nnsp;
570: IS pP;
571: PetscInt totP;
573: PetscCall(MatISGetLocalMat(A, &lA));
574: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lA", (PetscObject)lA));
576: pP = fetidp->pP;
577: if (!pP) { /* first time, need to compute pressure dofs */
578: PC_IS *pcis = (PC_IS *)fetidp->innerbddc->data;
579: Mat_IS *matis = (Mat_IS *)(A->data);
580: ISLocalToGlobalMapping l2g;
581: IS lP = NULL, II, pII, lPall, Pall, is1, is2;
582: const PetscInt *idxs;
583: PetscInt nl, ni, *widxs;
584: PetscInt i, j, n_neigh, *neigh, *n_shared, **shared, *count;
585: PetscInt rst, ren, n;
586: PetscBool ploc;
588: PetscCall(MatGetLocalSize(A, &nl, NULL));
589: PetscCall(MatGetOwnershipRange(A, &rst, &ren));
590: PetscCall(MatGetLocalSize(lA, &n, NULL));
591: PetscCall(MatISGetLocalToGlobalMapping(A, &l2g, NULL));
593: if (!pcis->is_I_local) { /* need to compute interior dofs */
594: PetscCall(PetscCalloc1(n, &count));
595: PetscCall(ISLocalToGlobalMappingGetInfo(l2g, &n_neigh, &neigh, &n_shared, &shared));
596: for (i = 1; i < n_neigh; i++)
597: for (j = 0; j < n_shared[i]; j++) count[shared[i][j]] += 1;
598: for (i = 0, j = 0; i < n; i++)
599: if (!count[i]) count[j++] = i;
600: PetscCall(ISLocalToGlobalMappingRestoreInfo(l2g, &n_neigh, &neigh, &n_shared, &shared));
601: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, j, count, PETSC_OWN_POINTER, &II));
602: } else {
603: PetscCall(PetscObjectReference((PetscObject)pcis->is_I_local));
604: II = pcis->is_I_local;
605: }
607: /* interior dofs in layout */
608: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
609: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
610: PetscCall(ISGetLocalSize(II, &ni));
611: PetscCall(ISGetIndices(II, &idxs));
612: for (i = 0; i < ni; i++) matis->sf_leafdata[idxs[i]] = 1;
613: PetscCall(ISRestoreIndices(II, &idxs));
614: PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
615: PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
616: PetscCall(PetscMalloc1(PetscMax(nl, n), &widxs));
617: for (i = 0, ni = 0; i < nl; i++)
618: if (matis->sf_rootdata[i]) widxs[ni++] = i + rst;
619: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &pII));
621: /* pressure dofs */
622: Pall = NULL;
623: lPall = NULL;
624: ploc = PETSC_FALSE;
625: if (fid < 0) { /* zero pressure block */
626: PetscInt np;
628: PetscCall(MatFindZeroDiagonals(A, &Pall));
629: PetscCall(ISGetSize(Pall, &np));
630: if (!np) { /* zero-block not found, defaults to last field (if set) */
631: fid = pcbddc->n_ISForDofsLocal ? pcbddc->n_ISForDofsLocal - 1 : pcbddc->n_ISForDofs - 1;
632: PetscCall(ISDestroy(&Pall));
633: } else if (!pcbddc->n_ISForDofsLocal && !pcbddc->n_ISForDofs) {
634: PetscCall(PCBDDCSetDofsSplitting(fetidp->innerbddc, 1, &Pall));
635: }
636: }
637: if (!Pall) { /* look for registered fields */
638: if (pcbddc->n_ISForDofsLocal) {
639: PetscInt np;
641: PetscCheck(fid >= 0 && fid < pcbddc->n_ISForDofsLocal, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Invalid field id for pressure %" PetscInt_FMT ", max %" PetscInt_FMT, fid, pcbddc->n_ISForDofsLocal);
642: /* need a sequential IS */
643: PetscCall(ISGetLocalSize(pcbddc->ISForDofsLocal[fid], &np));
644: PetscCall(ISGetIndices(pcbddc->ISForDofsLocal[fid], &idxs));
645: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, np, idxs, PETSC_COPY_VALUES, &lPall));
646: PetscCall(ISRestoreIndices(pcbddc->ISForDofsLocal[fid], &idxs));
647: ploc = PETSC_TRUE;
648: } else if (pcbddc->n_ISForDofs) {
649: PetscCheck(fid >= 0 && fid < pcbddc->n_ISForDofs, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Invalid field id for pressure %" PetscInt_FMT ", max %" PetscInt_FMT, fid, pcbddc->n_ISForDofs);
650: PetscCall(PetscObjectReference((PetscObject)pcbddc->ISForDofs[fid]));
651: Pall = pcbddc->ISForDofs[fid];
652: } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Cannot detect pressure field! Use KSPFETIDPGetInnerBDDC() + PCBDDCSetDofsSplitting or PCBDDCSetDofsSplittingLocal");
653: }
655: /* if the user requested the entire pressure,
656: remove the interior pressure dofs from II (or pII) */
657: if (allp) {
658: if (ploc) {
659: IS nII;
660: PetscCall(ISDifference(II, lPall, &nII));
661: PetscCall(ISDestroy(&II));
662: II = nII;
663: } else {
664: IS nII;
665: PetscCall(ISDifference(pII, Pall, &nII));
666: PetscCall(ISDestroy(&pII));
667: pII = nII;
668: }
669: }
670: if (ploc) {
671: PetscCall(ISDifference(lPall, II, &lP));
672: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lP", (PetscObject)lP));
673: } else {
674: PetscCall(ISDifference(Pall, pII, &pP));
675: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pP", (PetscObject)pP));
676: /* need all local pressure dofs */
677: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
678: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
679: PetscCall(ISGetLocalSize(Pall, &ni));
680: PetscCall(ISGetIndices(Pall, &idxs));
681: for (i = 0; i < ni; i++) matis->sf_rootdata[idxs[i] - rst] = 1;
682: PetscCall(ISRestoreIndices(Pall, &idxs));
683: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
684: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
685: for (i = 0, ni = 0; i < n; i++)
686: if (matis->sf_leafdata[i]) widxs[ni++] = i;
687: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ni, widxs, PETSC_COPY_VALUES, &lPall));
688: }
690: if (!Pall) {
691: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
692: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
693: PetscCall(ISGetLocalSize(lPall, &ni));
694: PetscCall(ISGetIndices(lPall, &idxs));
695: for (i = 0; i < ni; i++) matis->sf_leafdata[idxs[i]] = 1;
696: PetscCall(ISRestoreIndices(lPall, &idxs));
697: PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
698: PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
699: for (i = 0, ni = 0; i < nl; i++)
700: if (matis->sf_rootdata[i]) widxs[ni++] = i + rst;
701: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &Pall));
702: }
703: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject)Pall));
705: if (flip) {
706: PetscInt npl;
707: PetscCall(ISGetLocalSize(Pall, &npl));
708: PetscCall(ISGetIndices(Pall, &idxs));
709: PetscCall(MatCreateVecs(A, NULL, &fetidp->rhs_flip));
710: PetscCall(VecSet(fetidp->rhs_flip, 1.));
711: PetscCall(VecSetOption(fetidp->rhs_flip, VEC_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
712: for (i = 0; i < npl; i++) PetscCall(VecSetValue(fetidp->rhs_flip, idxs[i], -1., INSERT_VALUES));
713: PetscCall(VecAssemblyBegin(fetidp->rhs_flip));
714: PetscCall(VecAssemblyEnd(fetidp->rhs_flip));
715: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_flip", (PetscObject)fetidp->rhs_flip));
716: PetscCall(ISRestoreIndices(Pall, &idxs));
717: }
718: PetscCall(ISDestroy(&Pall));
719: PetscCall(ISDestroy(&pII));
721: /* local selected pressures in subdomain-wise and global ordering */
722: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
723: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
724: if (!ploc) {
725: PetscInt *widxs2;
727: PetscCheck(pP, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Missing parallel pressure IS");
728: PetscCall(ISGetLocalSize(pP, &ni));
729: PetscCall(ISGetIndices(pP, &idxs));
730: for (i = 0; i < ni; i++) matis->sf_rootdata[idxs[i] - rst] = 1;
731: PetscCall(ISRestoreIndices(pP, &idxs));
732: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
733: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
734: for (i = 0, ni = 0; i < n; i++)
735: if (matis->sf_leafdata[i]) widxs[ni++] = i;
736: PetscCall(PetscMalloc1(ni, &widxs2));
737: PetscCall(ISLocalToGlobalMappingApply(l2g, ni, widxs, widxs2));
738: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ni, widxs, PETSC_COPY_VALUES, &lP));
739: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lP", (PetscObject)lP));
740: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs2, PETSC_OWN_POINTER, &is1));
741: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_gP", (PetscObject)is1));
742: PetscCall(ISDestroy(&is1));
743: } else {
744: PetscCheck(lP, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing sequential pressure IS");
745: PetscCall(ISGetLocalSize(lP, &ni));
746: PetscCall(ISGetIndices(lP, &idxs));
747: for (i = 0; i < ni; i++)
748: if (idxs[i] >= 0 && idxs[i] < n) matis->sf_leafdata[idxs[i]] = 1;
749: PetscCall(ISRestoreIndices(lP, &idxs));
750: PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
751: PetscCall(ISLocalToGlobalMappingApply(l2g, ni, idxs, widxs));
752: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &is1));
753: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_gP", (PetscObject)is1));
754: PetscCall(ISDestroy(&is1));
755: PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
756: for (i = 0, ni = 0; i < nl; i++)
757: if (matis->sf_rootdata[i]) widxs[ni++] = i + rst;
758: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &pP));
759: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pP", (PetscObject)pP));
760: }
761: PetscCall(PetscFree(widxs));
763: /* If there's any "interior pressure",
764: we may want to use a discrete harmonic solver instead
765: of a Stokes harmonic for the Dirichlet preconditioner
766: Need to extract the interior velocity dofs in interior dofs ordering (iV)
767: and interior pressure dofs in local ordering (iP) */
768: if (!allp) {
769: ISLocalToGlobalMapping l2g_t;
771: PetscCall(ISDifference(lPall, lP, &is1));
772: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_iP", (PetscObject)is1));
773: PetscCall(ISDifference(II, is1, &is2));
774: PetscCall(ISDestroy(&is1));
775: PetscCall(ISLocalToGlobalMappingCreateIS(II, &l2g_t));
776: PetscCall(ISGlobalToLocalMappingApplyIS(l2g_t, IS_GTOLM_DROP, is2, &is1));
777: PetscCall(ISGetLocalSize(is1, &i));
778: PetscCall(ISGetLocalSize(is2, &j));
779: PetscCheck(i == j, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local sizes %" PetscInt_FMT " and %" PetscInt_FMT " for iV", i, j);
780: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_iV", (PetscObject)is1));
781: PetscCall(ISLocalToGlobalMappingDestroy(&l2g_t));
782: PetscCall(ISDestroy(&is1));
783: PetscCall(ISDestroy(&is2));
784: }
785: PetscCall(ISDestroy(&II));
787: /* exclude selected pressures from the inner BDDC */
788: if (pcbddc->DirichletBoundariesLocal) {
789: IS list[2], plP, isout;
790: PetscInt np;
792: /* need a parallel IS */
793: PetscCall(ISGetLocalSize(lP, &np));
794: PetscCall(ISGetIndices(lP, &idxs));
795: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), np, idxs, PETSC_USE_POINTER, &plP));
796: list[0] = plP;
797: list[1] = pcbddc->DirichletBoundariesLocal;
798: PetscCall(ISConcatenate(PetscObjectComm((PetscObject)ksp), 2, list, &isout));
799: PetscCall(ISSortRemoveDups(isout));
800: PetscCall(ISDestroy(&plP));
801: PetscCall(ISRestoreIndices(lP, &idxs));
802: PetscCall(PCBDDCSetDirichletBoundariesLocal(fetidp->innerbddc, isout));
803: PetscCall(ISDestroy(&isout));
804: } else if (pcbddc->DirichletBoundaries) {
805: IS list[2], isout;
807: list[0] = pP;
808: list[1] = pcbddc->DirichletBoundaries;
809: PetscCall(ISConcatenate(PetscObjectComm((PetscObject)ksp), 2, list, &isout));
810: PetscCall(ISSortRemoveDups(isout));
811: PetscCall(PCBDDCSetDirichletBoundaries(fetidp->innerbddc, isout));
812: PetscCall(ISDestroy(&isout));
813: } else {
814: IS plP;
815: PetscInt np;
817: /* need a parallel IS */
818: PetscCall(ISGetLocalSize(lP, &np));
819: PetscCall(ISGetIndices(lP, &idxs));
820: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), np, idxs, PETSC_COPY_VALUES, &plP));
821: PetscCall(PCBDDCSetDirichletBoundariesLocal(fetidp->innerbddc, plP));
822: PetscCall(ISDestroy(&plP));
823: PetscCall(ISRestoreIndices(lP, &idxs));
824: }
826: /* save CSR information for the pressure BDDC solver (if any) */
827: if (schp) {
828: PetscInt np, nt;
830: PetscCall(MatGetSize(matis->A, &nt, NULL));
831: PetscCall(ISGetLocalSize(lP, &np));
832: if (np) {
833: PetscInt *xadj = pcbddc->mat_graph->xadj;
834: PetscInt *adjn = pcbddc->mat_graph->adjncy;
835: PetscInt nv = pcbddc->mat_graph->nvtxs_csr;
837: if (nv && nv == nt) {
838: ISLocalToGlobalMapping pmap;
839: PetscInt *schp_csr, *schp_xadj, *schp_adjn, p;
840: PetscContainer c;
842: PetscCall(ISLocalToGlobalMappingCreateIS(lPall, &pmap));
843: PetscCall(ISGetIndices(lPall, &idxs));
844: for (p = 0, nv = 0; p < np; p++) {
845: PetscInt x, n = idxs[p];
847: PetscCall(ISGlobalToLocalMappingApply(pmap, IS_GTOLM_DROP, xadj[n + 1] - xadj[n], adjn + xadj[n], &x, NULL));
848: nv += x;
849: }
850: PetscCall(PetscMalloc1(np + 1 + nv, &schp_csr));
851: schp_xadj = schp_csr;
852: schp_adjn = schp_csr + np + 1;
853: for (p = 0, schp_xadj[0] = 0; p < np; p++) {
854: PetscInt x, n = idxs[p];
856: PetscCall(ISGlobalToLocalMappingApply(pmap, IS_GTOLM_DROP, xadj[n + 1] - xadj[n], adjn + xadj[n], &x, schp_adjn + schp_xadj[p]));
857: schp_xadj[p + 1] = schp_xadj[p] + x;
858: }
859: PetscCall(ISRestoreIndices(lPall, &idxs));
860: PetscCall(ISLocalToGlobalMappingDestroy(&pmap));
861: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
862: PetscCall(PetscContainerSetPointer(c, schp_csr));
863: PetscCall(PetscContainerSetUserDestroy(c, PetscContainerUserDestroyDefault));
864: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pCSR", (PetscObject)c));
865: PetscCall(PetscContainerDestroy(&c));
866: }
867: }
868: }
869: PetscCall(ISDestroy(&lPall));
870: PetscCall(ISDestroy(&lP));
871: fetidp->pP = pP;
872: }
874: /* total number of selected pressure dofs */
875: PetscCall(ISGetSize(fetidp->pP, &totP));
877: /* Set operator for inner BDDC */
878: if (totP || fetidp->rhs_flip) {
879: PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &nA));
880: } else {
881: PetscCall(PetscObjectReference((PetscObject)A));
882: nA = A;
883: }
884: if (fetidp->rhs_flip) {
885: PetscCall(MatDiagonalScale(nA, fetidp->rhs_flip, NULL));
886: if (totP) {
887: Mat lA2;
889: PetscCall(MatISGetLocalMat(nA, &lA));
890: PetscCall(MatDuplicate(lA, MAT_COPY_VALUES, &lA2));
891: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lA", (PetscObject)lA2));
892: PetscCall(MatDestroy(&lA2));
893: }
894: }
896: if (totP) {
897: PetscCall(MatSetOption(nA, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));
898: PetscCall(MatZeroRowsColumnsIS(nA, fetidp->pP, 1., NULL, NULL));
899: } else {
900: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lA", NULL));
901: }
902: PetscCall(MatGetNearNullSpace(Ap, &nnsp));
903: if (!nnsp) PetscCall(MatGetNullSpace(Ap, &nnsp));
904: if (!nnsp) PetscCall(MatGetNearNullSpace(A, &nnsp));
905: if (!nnsp) PetscCall(MatGetNullSpace(A, &nnsp));
906: PetscCall(MatSetNearNullSpace(nA, nnsp));
907: PetscCall(PCSetOperators(fetidp->innerbddc, nA, nA));
908: PetscCall(MatDestroy(&nA));
910: /* non-zero rhs on interior dofs when applying the preconditioner */
911: if (totP) pcbddc->switch_static = PETSC_TRUE;
913: /* if there are no interface pressures, set inner bddc flag for benign saddle point */
914: if (!totP) {
915: pcbddc->benign_saddle_point = PETSC_TRUE;
916: pcbddc->compute_nonetflux = PETSC_TRUE;
917: }
919: /* Operators for pressure preconditioner */
920: if (totP) {
921: /* Extract pressure block if needed */
922: if (!pisz) {
923: Mat C;
924: IS nzrows = NULL;
926: PetscCall(MatCreateSubMatrix(A, fetidp->pP, fetidp->pP, MAT_INITIAL_MATRIX, &C));
927: PetscCall(MatFindNonzeroRows(C, &nzrows));
928: if (nzrows) {
929: PetscInt i;
931: PetscCall(ISGetSize(nzrows, &i));
932: PetscCall(ISDestroy(&nzrows));
933: if (!i) pisz = PETSC_TRUE;
934: }
935: if (!pisz) {
936: PetscCall(MatScale(C, -1.)); /* i.e. Almost Incompressible Elasticity, Stokes discretized with Q1xQ1_stabilized */
937: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_C", (PetscObject)C));
938: }
939: PetscCall(MatDestroy(&C));
940: }
941: /* Divergence mat */
942: if (!pcbddc->divudotp) {
943: Mat B;
944: IS P;
945: IS l2l = NULL;
946: PetscBool save;
948: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject *)&P));
949: if (!pisz) {
950: IS F, V;
951: PetscInt m, M;
953: PetscCall(MatGetOwnershipRange(A, &m, &M));
954: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), M - m, m, 1, &F));
955: PetscCall(ISComplement(P, m, M, &V));
956: PetscCall(MatCreateSubMatrix(A, P, V, MAT_INITIAL_MATRIX, &B));
957: {
958: Mat_IS *Bmatis = (Mat_IS *)B->data;
959: PetscCall(PetscObjectReference((PetscObject)Bmatis->getsub_cis));
960: l2l = Bmatis->getsub_cis;
961: }
962: PetscCall(ISDestroy(&V));
963: PetscCall(ISDestroy(&F));
964: } else {
965: PetscCall(MatCreateSubMatrix(A, P, NULL, MAT_INITIAL_MATRIX, &B));
966: }
967: save = pcbddc->compute_nonetflux; /* SetDivergenceMat activates nonetflux computation */
968: PetscCall(PCBDDCSetDivergenceMat(fetidp->innerbddc, B, PETSC_FALSE, l2l));
969: pcbddc->compute_nonetflux = save;
970: PetscCall(MatDestroy(&B));
971: PetscCall(ISDestroy(&l2l));
972: }
973: if (A != Ap) { /* user has provided a different Pmat, this always superseeds the setter (TODO: is it OK?) */
974: /* use monolithic operator, we restrict later */
975: PetscCall(KSPFETIDPSetPressureOperator(ksp, Ap));
976: }
977: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_PPmat", (PetscObject *)&PPmat));
979: /* PPmat not present, use some default choice */
980: if (!PPmat) {
981: Mat C;
983: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_C", (PetscObject *)&C));
984: if (!schp && C) { /* non-zero pressure block, most likely Almost Incompressible Elasticity */
985: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
986: } else if (!pisz && schp) { /* we need the whole pressure mass matrix to define the interface BDDC */
987: IS P;
989: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject *)&P));
990: PetscCall(MatCreateSubMatrix(A, P, P, MAT_INITIAL_MATRIX, &C));
991: PetscCall(MatScale(C, -1.));
992: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
993: PetscCall(MatDestroy(&C));
994: } else { /* identity (need to be scaled properly by the user using e.g. a Richardson method */
995: PetscInt nl;
997: PetscCall(ISGetLocalSize(fetidp->pP, &nl));
998: PetscCall(MatCreate(PetscObjectComm((PetscObject)ksp), &C));
999: PetscCall(MatSetSizes(C, nl, nl, totP, totP));
1000: PetscCall(MatSetType(C, MATAIJ));
1001: PetscCall(MatMPIAIJSetPreallocation(C, 1, NULL, 0, NULL));
1002: PetscCall(MatSeqAIJSetPreallocation(C, 1, NULL));
1003: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
1004: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
1005: PetscCall(MatShift(C, 1.));
1006: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
1007: PetscCall(MatDestroy(&C));
1008: }
1009: }
1011: /* Preconditioned operator for the pressure block */
1012: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_PPmat", (PetscObject *)&PPmat));
1013: if (PPmat) {
1014: Mat C;
1015: IS Pall;
1016: PetscInt AM, PAM, PAN, pam, pan, am, an, pl, pIl, pAg, pIg;
1018: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject *)&Pall));
1019: PetscCall(MatGetSize(A, &AM, NULL));
1020: PetscCall(MatGetSize(PPmat, &PAM, &PAN));
1021: PetscCall(ISGetSize(Pall, &pAg));
1022: PetscCall(ISGetSize(fetidp->pP, &pIg));
1023: PetscCall(MatGetLocalSize(PPmat, &pam, &pan));
1024: PetscCall(MatGetLocalSize(A, &am, &an));
1025: PetscCall(ISGetLocalSize(Pall, &pIl));
1026: PetscCall(ISGetLocalSize(fetidp->pP, &pl));
1027: PetscCheck(PAM == PAN, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Pressure matrix must be square, unsupported %" PetscInt_FMT " x %" PetscInt_FMT, PAM, PAN);
1028: PetscCheck(pam == pan, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Local sizes of pressure matrix must be equal, unsupported %" PetscInt_FMT " x %" PetscInt_FMT, pam, pan);
1029: PetscCheck(pam == am || pam == pl || pam == pIl, PETSC_COMM_SELF, PETSC_ERR_USER, "Invalid number of local rows %" PetscInt_FMT " for pressure matrix! Supported are %" PetscInt_FMT ", %" PetscInt_FMT " or %" PetscInt_FMT, pam, am, pl, pIl);
1030: PetscCheck(pan == an || pan == pl || pan == pIl, PETSC_COMM_SELF, PETSC_ERR_USER, "Invalid number of local columns %" PetscInt_FMT " for pressure matrix! Supported are %" PetscInt_FMT ", %" PetscInt_FMT " or %" PetscInt_FMT, pan, an, pl, pIl);
1031: if (PAM == AM) { /* monolithic ordering, restrict to pressure */
1032: if (schp) {
1033: PetscCall(MatCreateSubMatrix(PPmat, Pall, Pall, MAT_INITIAL_MATRIX, &C));
1034: } else {
1035: PetscCall(MatCreateSubMatrix(PPmat, fetidp->pP, fetidp->pP, MAT_INITIAL_MATRIX, &C));
1036: }
1037: } else if (pAg == PAM) { /* global ordering for pressure only */
1038: if (!allp && !schp) { /* solving for interface pressure only */
1039: IS restr;
1041: PetscCall(ISRenumber(fetidp->pP, NULL, NULL, &restr));
1042: PetscCall(MatCreateSubMatrix(PPmat, restr, restr, MAT_INITIAL_MATRIX, &C));
1043: PetscCall(ISDestroy(&restr));
1044: } else {
1045: PetscCall(PetscObjectReference((PetscObject)PPmat));
1046: C = PPmat;
1047: }
1048: } else if (pIg == PAM) { /* global ordering for selected pressure only */
1049: PetscCheck(!schp, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Need the entire matrix");
1050: PetscCall(PetscObjectReference((PetscObject)PPmat));
1051: C = PPmat;
1052: } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Unable to use the pressure matrix");
1054: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
1055: PetscCall(MatDestroy(&C));
1056: } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Missing Pmat for pressure block");
1057: } else { /* totP == 0 */
1058: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pP", NULL));
1059: }
1060: }
1061: PetscFunctionReturn(PETSC_SUCCESS);
1062: }
/*
   KSPSetUp_FETIDP - sets up the FETI-DP solver.

   Builds the operators for the inner BDDC, sets it up, forces exact coarse
   and local Neumann solves (required by this FETI-DP implementation), and
   (re)creates the FETI-DP matrix/preconditioner pair for the inner KSP that
   iterates on the Lagrange multipliers. Finally propagates a few settings
   from the outer KSP to the inner one.
*/
static PetscErrorCode KSPSetUp_FETIDP(KSP ksp)
{
  KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
  PC_BDDC *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(KSPFETIDPSetUpOperators(ksp));
  /* set up BDDC */
  PetscCall(PCSetErrorIfFailure(fetidp->innerbddc, ksp->errorifnotconverged));
  PetscCall(PCSetUp(fetidp->innerbddc));
  /* FETI-DP as it is implemented needs an exact coarse solver */
  if (pcbddc->coarse_ksp) {
    /* PETSC_SMALL tolerances effectively force convergence to machine precision */
    PetscCall(KSPSetTolerances(pcbddc->coarse_ksp, PETSC_SMALL, PETSC_SMALL, PETSC_DEFAULT, 1000));
    PetscCall(KSPSetNormType(pcbddc->coarse_ksp, KSP_NORM_DEFAULT));
  }
  /* FETI-DP as it is implemented needs exact local Neumann solvers */
  PetscCall(KSPSetTolerances(pcbddc->ksp_R, PETSC_SMALL, PETSC_SMALL, PETSC_DEFAULT, 1000));
  PetscCall(KSPSetNormType(pcbddc->ksp_R, KSP_NORM_DEFAULT));

  /* setup FETI-DP operators
     If fetidp->statechanged is true, we need to update the operators
     needed in the saddle-point case. This should be replaced
     by a better logic when the FETI-DP matrix and preconditioner will
     have their own classes */
  if (pcbddc->new_primal_space || fetidp->statechanged) {
    Mat F; /* the FETI-DP matrix */
    PC D;  /* the FETI-DP preconditioner */
    PetscCall(KSPReset(fetidp->innerksp));
    PetscCall(PCBDDCCreateFETIDPOperators(fetidp->innerbddc, fetidp->fully_redundant, ((PetscObject)ksp)->prefix, &F, &D));
    PetscCall(KSPSetOperators(fetidp->innerksp, F, F));
    /* inner iteration inherits the outer tolerances; the outer KSP itself does not iterate */
    PetscCall(KSPSetTolerances(fetidp->innerksp, ksp->rtol, ksp->abstol, ksp->divtol, ksp->max_it));
    PetscCall(KSPSetPC(fetidp->innerksp, D));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)D, (PetscObject)fetidp->innerksp, 0));
    PetscCall(KSPSetFromOptions(fetidp->innerksp));
    PetscCall(MatCreateVecs(F, &(fetidp->innerksp)->vec_rhs, &(fetidp->innerksp)->vec_sol));
    /* F and D are now referenced by the inner KSP; drop our references */
    PetscCall(MatDestroy(&F));
    PetscCall(PCDestroy(&D));
    if (fetidp->check) {
      PetscViewer viewer;

      /* debugging output goes to the BDDC debug viewer when available, stdout otherwise */
      if (!pcbddc->dbg_viewer) {
        viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
      } else {
        viewer = pcbddc->dbg_viewer;
      }
      PetscCall(KSPFETIDPCheckOperators(ksp, viewer));
    }
  }
  fetidp->statechanged = PETSC_FALSE;
  pcbddc->new_primal_space = PETSC_FALSE;

  /* propagate settings to the inner solve */
  PetscCall(KSPGetComputeSingularValues(ksp, &flg));
  PetscCall(KSPSetComputeSingularValues(fetidp->innerksp, flg));
  if (ksp->res_hist) PetscCall(KSPSetResidualHistory(fetidp->innerksp, ksp->res_hist, ksp->res_hist_max, ksp->res_hist_reset));
  PetscCall(KSPSetErrorIfNotConverged(fetidp->innerksp, ksp->errorifnotconverged));
  PetscCall(KSPSetUp(fetidp->innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1125: static PetscErrorCode KSPSolve_FETIDP(KSP ksp)
1126: {
1127: Mat F, A;
1128: MatNullSpace nsp;
1129: Vec X, B, Xl, Bl;
1130: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
1131: PC_BDDC *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
1132: KSPConvergedReason reason;
1133: PC pc;
1134: PCFailedReason pcreason;
1135: PetscInt hist_len;
1137: PetscFunctionBegin;
1138: PetscCall(PetscCitationsRegister(citation, &cited));
1139: if (fetidp->saddlepoint) PetscCall(PetscCitationsRegister(citation2, &cited2));
1140: PetscCall(KSPGetOperators(ksp, &A, NULL));
1141: PetscCall(KSPGetRhs(ksp, &B));
1142: PetscCall(KSPGetSolution(ksp, &X));
1143: PetscCall(KSPGetOperators(fetidp->innerksp, &F, NULL));
1144: PetscCall(KSPGetRhs(fetidp->innerksp, &Bl));
1145: PetscCall(KSPGetSolution(fetidp->innerksp, &Xl));
1146: PetscCall(PCBDDCMatFETIDPGetRHS(F, B, Bl));
1147: if (ksp->transpose_solve) {
1148: PetscCall(KSPSolveTranspose(fetidp->innerksp, Bl, Xl));
1149: } else {
1150: PetscCall(KSPSolve(fetidp->innerksp, Bl, Xl));
1151: }
1152: PetscCall(KSPGetConvergedReason(fetidp->innerksp, &reason));
1153: PetscCall(KSPGetPC(fetidp->innerksp, &pc));
1154: PetscCall(PCGetFailedReason(pc, &pcreason));
1155: if ((reason < 0 && reason != KSP_DIVERGED_ITS) || pcreason) {
1156: PetscInt its;
1157: PetscCall(KSPGetIterationNumber(fetidp->innerksp, &its));
1158: ksp->reason = KSP_DIVERGED_PC_FAILED;
1159: PetscCall(VecSetInf(Xl));
1160: PetscCall(PetscInfo(ksp, "Inner KSP solve failed: %s %s at iteration %" PetscInt_FMT, KSPConvergedReasons[reason], PCFailedReasons[pcreason], its));
1161: }
1162: PetscCall(PCBDDCMatFETIDPGetSolution(F, Xl, X));
1163: PetscCall(MatGetNullSpace(A, &nsp));
1164: if (nsp) PetscCall(MatNullSpaceRemove(nsp, X));
1165: /* update ksp with stats from inner ksp */
1166: PetscCall(KSPGetConvergedReason(fetidp->innerksp, &ksp->reason));
1167: PetscCall(KSPGetIterationNumber(fetidp->innerksp, &ksp->its));
1168: ksp->totalits += ksp->its;
1169: PetscCall(KSPGetResidualHistory(fetidp->innerksp, NULL, &hist_len));
1170: ksp->res_hist_len = (size_t)hist_len;
1171: /* restore defaults for inner BDDC (Pre/PostSolve flags) */
1172: pcbddc->temp_solution_used = PETSC_FALSE;
1173: pcbddc->rhs_change = PETSC_FALSE;
1174: pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
1175: PetscFunctionReturn(PETSC_SUCCESS);
1176: }
/*
   KSPReset_FETIDP - resets the FETI-DP solver to its pre-setup state.

   Destroys the selected-pressure IS and the flip rhs vector, replaces the
   inner BDDC with a brand new PCBDDC (PCReset() is deliberately avoided
   because it does not account for the reference counting on the composed
   objects), destroys the inner multiplier KSP, and marks the state changed
   so the next KSPSetUp_FETIDP() rebuilds the FETI-DP operators.
*/
static PetscErrorCode KSPReset_FETIDP(KSP ksp)
{
  KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  PetscCall(ISDestroy(&fetidp->pP));
  PetscCall(VecDestroy(&fetidp->rhs_flip));
  /* avoid PCReset that does not take into account ref counting */
  PetscCall(PCDestroy(&fetidp->innerbddc));
  PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &fetidp->innerbddc));
  PetscCall(PCSetType(fetidp->innerbddc, PCBDDC));
  pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
  /* same non-default choice made at creation time: non-symmetric primal space */
  pcbddc->symmetric_primal = PETSC_FALSE;
  PetscCall(KSPDestroy(&fetidp->innerksp));
  fetidp->saddlepoint = PETSC_FALSE;
  /* invalidate the operator state trackers so operators are rebuilt */
  fetidp->matstate = -1;
  fetidp->matnnzstate = -1;
  fetidp->statechanged = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
   KSPDestroy_FETIDP - destroys the FETI-DP solver context.

   KSPReset_FETIDP() leaves a freshly created inner BDDC behind and the
   inner KSP destroyed; here the inner objects are destroyed for good,
   together with the monitor context, the composed setter/getter methods
   installed at creation time, and the KSP_FETIDP data itself.
*/
static PetscErrorCode KSPDestroy_FETIDP(KSP ksp)
{
  KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;

  PetscFunctionBegin;
  PetscCall(KSPReset_FETIDP(ksp));
  PetscCall(PCDestroy(&fetidp->innerbddc));
  PetscCall(KSPDestroy(&fetidp->innerksp));
  PetscCall(PetscFree(fetidp->monctx));
  /* remove composed methods (compose with NULL) */
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetInnerBDDC_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerBDDC_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetPressureOperator_C", NULL));
  PetscCall(PetscFree(ksp->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1217: static PetscErrorCode KSPView_FETIDP(KSP ksp, PetscViewer viewer)
1218: {
1219: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
1220: PetscBool iascii;
1222: PetscFunctionBegin;
1223: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1224: if (iascii) {
1225: PetscCall(PetscViewerASCIIPrintf(viewer, " fully redundant: %d\n", fetidp->fully_redundant));
1226: PetscCall(PetscViewerASCIIPrintf(viewer, " saddle point: %d\n", fetidp->saddlepoint));
1227: PetscCall(PetscViewerASCIIPrintf(viewer, "Inner KSP solver details\n"));
1228: }
1229: PetscCall(PetscViewerASCIIPushTab(viewer));
1230: PetscCall(KSPView(fetidp->innerksp, viewer));
1231: PetscCall(PetscViewerASCIIPopTab(viewer));
1232: if (iascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Inner BDDC solver details\n"));
1233: PetscCall(PetscViewerASCIIPushTab(viewer));
1234: PetscCall(PCView(fetidp->innerbddc, viewer));
1235: PetscCall(PetscViewerASCIIPopTab(viewer));
1236: PetscFunctionReturn(PETSC_SUCCESS);
1237: }
/*
   KSPSetFromOptions_FETIDP - processes command line options for the FETI-DP solver.

   The option prefixes of the inner objects are assigned here (and not at
   creation time) because the parent KSP prefix is only guaranteed to be
   valid at this point. The inner KSP uses the "fetidp_" appended prefix;
   the inner BDDC uses "fetidp_bddc_" unless a user-provided BDDC is in use
   (fetidp->userbddc), whose prefix is left untouched.
*/
static PetscErrorCode KSPSetFromOptions_FETIDP(KSP ksp, PetscOptionItems *PetscOptionsObject)
{
  KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;

  PetscFunctionBegin;
  /* set options prefixes for the inner objects, since the parent prefix will be valid at this point */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fetidp->innerksp, ((PetscObject)ksp)->prefix));
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)fetidp->innerksp, "fetidp_"));
  if (!fetidp->userbddc) {
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fetidp->innerbddc, ((PetscObject)ksp)->prefix));
    PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)fetidp->innerbddc, "fetidp_bddc_"));
  }
  PetscOptionsHeadBegin(PetscOptionsObject, "KSP FETIDP options");
  PetscCall(PetscOptionsBool("-ksp_fetidp_fullyredundant", "Use fully redundant multipliers", "none", fetidp->fully_redundant, &fetidp->fully_redundant, NULL));
  PetscCall(PetscOptionsBool("-ksp_fetidp_saddlepoint", "Activates support for saddle-point problems", NULL, fetidp->saddlepoint, &fetidp->saddlepoint, NULL));
  PetscCall(PetscOptionsBool("-ksp_fetidp_check", "Activates verbose debugging output FETI-DP operators", NULL, fetidp->check, &fetidp->check, NULL));
  PetscOptionsHeadEnd();
  /* let the inner BDDC process its own options as well */
  PetscCall(PCSetFromOptions(fetidp->innerbddc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1260: /*MC
1261: KSPFETIDP - The FETI-DP method [1]
1263: Options Database Keys:
1264: + -ksp_fetidp_fullyredundant <false> - use a fully redundant set of Lagrange multipliers
1265: . -ksp_fetidp_saddlepoint <false> - activates support for saddle point problems, see [2]
1266: . -ksp_fetidp_saddlepoint_flip <false> - usually, an incompressible Stokes problem is written as
1267: | A B^T | | v | = | f |
1268: | B 0 | | p | = | g |
1269: with B representing -\int_\Omega \nabla \cdot u q.
1270: If -ksp_fetidp_saddlepoint_flip is true, the code assumes that the user provides it as
1271: | A B^T | | v | = | f |
1272: |-B 0 | | p | = |-g |
1273: . -ksp_fetidp_pressure_field <-1> - activates support for saddle point problems, and identifies the pressure field id.
1274: If this information is not provided, the pressure field is detected by using MatFindZeroDiagonals().
1275: - -ksp_fetidp_pressure_all <false> - if false, uses the interface pressures, as described in [2]. If true, uses the entire pressure field.
1277: Level: Advanced
1279: Notes:
1280: The matrix for the `KSP` must be of type `MATIS`.
1282: The FETI-DP linear system (automatically generated constructing an internal `PCBDDC` object) is solved using an internal `KSP` object.
1284: Options for the inner `KSP` and for the customization of the `PCBDDC` object can be specified at command line by using the prefixes `-fetidp_` and `-fetidp_bddc_`. E.g.,
1285: .vb
1286: -fetidp_ksp_type gmres -fetidp_bddc_pc_bddc_symmetric false
1287: .ve
1288: will use `KSPGMRES` for the solution of the linear system on the Lagrange multipliers, generated using a non-symmetric `PCBDDC`.
1290: For saddle point problems with continuous pressures, the preconditioned operator for the pressure solver can be specified with `KSPFETIDPSetPressureOperator()`.
1291:   Alternatively, the pressure operator is extracted from the preconditioning matrix (if it is different from the linear solver matrix).
1292: If none of the above, an identity matrix will be created; the user then needs to scale it through a Richardson solver.
1293:   Options for the pressure solver can be prefixed with `-fetidp_fieldsplit_p_`, E.g.
1294: .vb
1295:       -fetidp_fieldsplit_p_ksp_type preonly -fetidp_fieldsplit_p_pc_type lu -fetidp_fieldsplit_p_pc_factor_mat_solver_type mumps
1296: .ve
1296: .ve
1297: In order to use the deluxe version of FETI-DP, you must customize the inner `PCBDDC` operator with -fetidp_bddc_pc_bddc_use_deluxe_scaling -fetidp_bddc_pc_bddc_deluxe_singlemat and use
1298: non-redundant multipliers, i.e. `-ksp_fetidp_fullyredundant false`. Options for the scaling solver are prefixed by `-fetidp_bddelta_`, E.g.
1299: .vb
1300: -fetidp_bddelta_pc_factor_mat_solver_type mumps -fetidp_bddelta_pc_type lu
1301: .ve
1303: Some of the basic options such as the maximum number of iterations and tolerances are automatically passed from this `KSP` to the inner `KSP` that actually performs the iterations.
1305: The converged reason and number of iterations computed are passed from the inner `KSP` to this `KSP` at the end of the solution.
1307: Developer Note:
1308: Even though this method does not directly use any norms, the user is allowed to set the `KSPNormType` to any value.
1309: This is so users do not have to change `KSPNormType` options when they switch from other `KSP` methods to this one.
1311: References:
1312: + [1] - C. Farhat, M. Lesoinne, P. LeTallec, K. Pierson, and D. Rixen, FETI-DP: a dual-primal unified FETI method. I. A faster alternative to the two-level FETI method, Internat. J. Numer. Methods Engrg., 50 (2001), pp. 1523--1544
1313: - [2] - X. Tu, J. Li, A FETI-DP type domain decomposition algorithm for three-dimensional incompressible Stokes equations, SIAM J. Numer. Anal., 53 (2015), pp. 720-742
1315: .seealso: [](ch_ksp), `MATIS`, `PCBDDC`, `KSPFETIDPSetInnerBDDC()`, `KSPFETIDPGetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`
1316: M*/
/*
   KSPCreate_FETIDP - creates the FETI-DP solver context and registers its
   methods. Two inner objects are created: a KSP that iterates on the
   Lagrange multipliers and a BDDC preconditioner from which the FETI-DP
   operators are assembled at setup time.
*/
PETSC_EXTERN PetscErrorCode KSPCreate_FETIDP(KSP ksp)
{
  KSP_FETIDP *fetidp;
  KSP_FETIDPMon *monctx;
  PC_BDDC *pcbddc;
  PC pc;

  PetscFunctionBegin;
  /* the method does not use norms directly; accept any norm/side combination
     so users need not change KSPNormType options when switching to this type */
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_LEFT, 3));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_RIGHT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_PRECONDITIONED, PC_LEFT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_PRECONDITIONED, PC_RIGHT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_LEFT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NATURAL, PC_LEFT, 2));

  PetscCall(PetscNew(&fetidp));
  /* invalid state trackers force operator (re)construction at first setup */
  fetidp->matstate = -1;
  fetidp->matnnzstate = -1;
  fetidp->statechanged = PETSC_TRUE;

  ksp->data = (void *)fetidp;
  ksp->ops->setup = KSPSetUp_FETIDP;
  ksp->ops->solve = KSPSolve_FETIDP;
  ksp->ops->destroy = KSPDestroy_FETIDP;
  ksp->ops->computeeigenvalues = KSPComputeEigenvalues_FETIDP;
  ksp->ops->computeextremesingularvalues = KSPComputeExtremeSingularValues_FETIDP;
  ksp->ops->view = KSPView_FETIDP;
  ksp->ops->setfromoptions = KSPSetFromOptions_FETIDP;
  ksp->ops->buildsolution = KSPBuildSolution_FETIDP;
  ksp->ops->buildresidual = KSPBuildResidualDefault;
  /* this KSP does not apply a preconditioner itself */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  /* create the inner KSP for the Lagrange multipliers */
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksp), &fetidp->innerksp));
  PetscCall(KSPGetPC(fetidp->innerksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  /* monitor: forwards inner iterations to the parent KSP */
  PetscCall(PetscNew(&monctx));
  monctx->parentksp = ksp;
  fetidp->monctx = monctx;
  PetscCall(KSPMonitorSet(fetidp->innerksp, KSPMonitor_FETIDP, fetidp->monctx, NULL));
  /* create the inner BDDC */
  PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &fetidp->innerbddc));
  PetscCall(PCSetType(fetidp->innerbddc, PCBDDC));
  /* make sure we always obtain a consistent FETI-DP matrix
     for symmetric problems, the user can always customize it through the command line */
  pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
  pcbddc->symmetric_primal = PETSC_FALSE;
  /* composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetInnerBDDC_C", KSPFETIDPSetInnerBDDC_FETIDP));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerBDDC_C", KSPFETIDPGetInnerBDDC_FETIDP));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerKSP_C", KSPFETIDPGetInnerKSP_FETIDP));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetPressureOperator_C", KSPFETIDPSetPressureOperator_FETIDP));
  /* need to call KSPSetUp_FETIDP even with KSP_SETUP_NEWMATRIX */
  ksp->setupnewmatrix = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}