Actual source code: wb.c
2: #include <petscdmda.h>
3: #include <petsc/private/pcmgimpl.h>
4: #include <petsc/private/hashmapi.h>
6: typedef struct {
7: PCExoticType type;
8: Mat P; /* the constructed interpolation matrix */
9: PetscBool directSolve; /* use direct LU factorization to construct interpolation */
10: KSP ksp;
11: } PC_Exotic;
13: const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_Exotic", NULL};
15: /*
16: DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
18: */
19: PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
20: {
21: PetscInt dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
22: PetscInt mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[26], *globals, Ng, *IIint, *IIsurf;
23: Mat Xint, Xsurf, Xint_tmp;
24: IS isint, issurf, is, row, col;
25: ISLocalToGlobalMapping ltg;
26: MPI_Comm comm;
27: Mat A, Aii, Ais, Asi, *Aholder, iAii;
28: MatFactorInfo info;
29: PetscScalar *xsurf, *xint;
30: const PetscScalar *rxint;
31: #if defined(PETSC_USE_DEBUG_foo)
32: PetscScalar tmp;
33: #endif
34: PetscHMapI ht = NULL;
36: PetscFunctionBegin;
37: PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
38: PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
39: PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
40: PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
41: PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
42: istart = istart ? -1 : 0;
43: jstart = jstart ? -1 : 0;
44: kstart = kstart ? -1 : 0;
46: /*
47: the columns of P are the interpolation of each coarse grid point (one for each vertex, edge, and face)
48: to all the local degrees of freedom (this includes the vertices, edges and faces).
50: Xint are the subset of the interpolation into the interior
52: Xface are the interpolation onto faces but not into the interior
54: Xsurf are the interpolation onto the vertices and edges (the surfbasket)
56: Symbolically one could write P = [Xint; Xface; Xsurf] (the three blocks stacked vertically) after interchanging the rows to match the natural ordering on the domain
58: */
59: N = (m - istart) * (n - jstart) * (p - kstart);
60: Nint = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
61: Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
62: Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
63: Nsurf = Nface + Nwire;
64: PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 26, NULL, &Xint));
65: PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 26, NULL, &Xsurf));
66: PetscCall(MatDenseGetArray(Xsurf, &xsurf));
68: /*
69: Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
70: Xsurf will be all zero (thus making the coarse matrix singular).
71: */
72: PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
73: PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
74: PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");
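/*
   The 26 columns of Xsurf filled below correspond to the coarse basis functions of the wirebasket space:
   one for each of the 8 vertices, 12 edges, and 6 faces of the local (ghosted) subdomain boundary, in the
   order they are first encountered by the lexicographic sweep (i fastest, then j, then k) over the boundary nodes.
*/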
76: cnt = 0;
78: xsurf[cnt++] = 1;
79: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + Nsurf] = 1;
80: xsurf[cnt++ + 2 * Nsurf] = 1;
82: for (j = 1; j < n - 1 - jstart; j++) {
83: xsurf[cnt++ + 3 * Nsurf] = 1;
84: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
85: xsurf[cnt++ + 5 * Nsurf] = 1;
86: }
88: xsurf[cnt++ + 6 * Nsurf] = 1;
89: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 7 * Nsurf] = 1;
90: xsurf[cnt++ + 8 * Nsurf] = 1;
92: for (k = 1; k < p - 1 - kstart; k++) {
93: xsurf[cnt++ + 9 * Nsurf] = 1;
94: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 10 * Nsurf] = 1;
95: xsurf[cnt++ + 11 * Nsurf] = 1;
97: for (j = 1; j < n - 1 - jstart; j++) {
98: xsurf[cnt++ + 12 * Nsurf] = 1;
99: /* these are the interior nodes */
100: xsurf[cnt++ + 13 * Nsurf] = 1;
101: }
103: xsurf[cnt++ + 14 * Nsurf] = 1;
104: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 15 * Nsurf] = 1;
105: xsurf[cnt++ + 16 * Nsurf] = 1;
106: }
108: xsurf[cnt++ + 17 * Nsurf] = 1;
109: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 18 * Nsurf] = 1;
110: xsurf[cnt++ + 19 * Nsurf] = 1;
112: for (j = 1; j < n - 1 - jstart; j++) {
113: xsurf[cnt++ + 20 * Nsurf] = 1;
114: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 21 * Nsurf] = 1;
115: xsurf[cnt++ + 22 * Nsurf] = 1;
116: }
118: xsurf[cnt++ + 23 * Nsurf] = 1;
119: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 24 * Nsurf] = 1;
120: xsurf[cnt++ + 25 * Nsurf] = 1;
122: /* interpolations only sum to 1 when using direct solver */
123: #if defined(PETSC_USE_DEBUG_foo)
124: for (i = 0; i < Nsurf; i++) {
125: tmp = 0.0;
126: for (j = 0; j < 26; j++) tmp += xsurf[i + j * Nsurf];
127: PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
128: }
129: #endif
130: PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
131: /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/
133: /*
134: II are the indices for all the needed vertices (in global numbering)
135: Iint are the indices for the interior values, Isurf for the surface values
136: (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
137: is NOT the local DMDA ordering.)
138: IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
139: */
140: #define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
141: PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
142: PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
143: for (k = 0; k < p - kstart; k++) {
144: for (j = 0; j < n - jstart; j++) {
145: for (i = 0; i < m - istart; i++) {
146: II[c++] = i + j * mwidth + k * mwidth * nwidth;
148: if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
149: IIint[cint] = i + j * mwidth + k * mwidth * nwidth;
150: Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
151: } else {
152: IIsurf[csurf] = i + j * mwidth + k * mwidth * nwidth;
153: Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
154: }
155: }
156: }
157: }
158: #undef Endpoint
159: PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
160: PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
161: PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
162: PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
163: PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
164: PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
165: PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
166: PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
167: PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
168: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
169: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
170: PetscCall(PetscFree3(II, Iint, Isurf));
172: PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
173: A = *Aholder;
174: PetscCall(PetscFree(Aholder));
176: PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
177: PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
178: PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));
180: /*
181: Solve for the interpolation onto the interior Xint
182: */
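/* i.e. compute Xint = -inv(Aii) * Ais * Xsurf (the energy-minimizing extension of the boundary values into the subdomain interior), either with a direct factorization of Aii or column by column with a KSP */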
183: PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
184: PetscCall(MatScale(Xint_tmp, -1.0));
185: if (exotic->directSolve) {
186: PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
187: PetscCall(MatFactorInfoInitialize(&info));
188: PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
189: PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
190: PetscCall(ISDestroy(&row));
191: PetscCall(ISDestroy(&col));
192: PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
193: PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
194: PetscCall(MatDestroy(&iAii));
195: } else {
196: Vec b, x;
197: PetscScalar *xint_tmp;
199: PetscCall(MatDenseGetArray(Xint, &xint));
200: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
201: PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
202: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
203: PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
204: for (i = 0; i < 26; i++) {
205: PetscCall(VecPlaceArray(x, xint + i * Nint));
206: PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
207: PetscCall(KSPSolve(exotic->ksp, b, x));
208: PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
209: PetscCall(VecResetArray(x));
210: PetscCall(VecResetArray(b));
211: }
212: PetscCall(MatDenseRestoreArray(Xint, &xint));
213: PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
214: PetscCall(VecDestroy(&x));
215: PetscCall(VecDestroy(&b));
216: }
217: PetscCall(MatDestroy(&Xint_tmp));
219: #if defined(PETSC_USE_DEBUG_foo)
220: PetscCall(MatDenseGetArrayRead(Xint, &rxint));
221: for (i = 0; i < Nint; i++) {
222: tmp = 0.0;
223: for (j = 0; j < 26; j++) tmp += rxint[i + j * Nint];
225: PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
226: }
227: PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
228: /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
229: #endif
231: /* total vertices + total faces + total edges */
232: Ntotal = (mp + 1) * (np + 1) * (pp + 1) + mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1) + mp * (np + 1) * (pp + 1) + np * (mp + 1) * (pp + 1) + pp * (mp + 1) * (np + 1);
234: /*
235: For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
236: */
237: cnt = 0;
239: gl[cnt++] = 0;
240: {
241: gl[cnt++] = 1;
242: }
243: gl[cnt++] = m - istart - 1;
244: {
245: gl[cnt++] = mwidth;
246: {
247: gl[cnt++] = mwidth + 1;
248: }
249: gl[cnt++] = mwidth + m - istart - 1;
250: }
251: gl[cnt++] = mwidth * (n - jstart - 1);
252: {
253: gl[cnt++] = mwidth * (n - jstart - 1) + 1;
254: }
255: gl[cnt++] = mwidth * (n - jstart - 1) + m - istart - 1;
256: {
257: gl[cnt++] = mwidth * nwidth;
258: {
259: gl[cnt++] = mwidth * nwidth + 1;
260: }
261: gl[cnt++] = mwidth * nwidth + m - istart - 1;
262: {
263: gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
264: gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
265: }
266: gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1);
267: {
268: gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
269: }
270: gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + m - istart - 1;
271: }
272: gl[cnt++] = mwidth * nwidth * (p - kstart - 1);
273: {
274: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + 1;
275: }
276: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + m - istart - 1;
277: {
278: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth;
279: {
280: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
281: }
282: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + m - istart - 1;
283: }
284: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1);
285: {
286: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + 1;
287: }
288: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + m - istart - 1;
290: /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
291: /* convert that to global numbering and get them on all processes */
292: PetscCall(ISLocalToGlobalMappingApply(ltg, 26, gl, gl));
293: /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
294: PetscCall(PetscMalloc1(26 * mp * np * pp, &globals));
295: PetscCallMPI(MPI_Allgather(gl, 26, MPIU_INT, globals, 26, MPIU_INT, PetscObjectComm((PetscObject)da)));
297: /* Number the coarse grid points from 0 to Ntotal */
298: PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
299: for (i = 0, cnt = 0; i < 26 * mp * np * pp; i++) {
300: PetscHashIter it = 0;
301: PetscBool missing = PETSC_TRUE;
303: PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
304: if (missing) {
305: ++cnt;
306: PetscCall(PetscHMapIIterSet(ht, it, cnt));
307: }
308: }
309: PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
310: PetscCall(PetscFree(globals));
311: for (i = 0; i < 26; i++) {
312: PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
313: --(gl[i]);
314: }
315: PetscCall(PetscHMapIDestroy(&ht));
316: /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
318: /* construct global interpolation matrix */
319: PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
320: if (reuse == MAT_INITIAL_MATRIX) {
321: PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint + Nsurf, NULL, P));
322: } else {
323: PetscCall(MatZeroEntries(*P));
324: }
325: PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
326: PetscCall(MatDenseGetArrayRead(Xint, &rxint));
327: PetscCall(MatSetValues(*P, Nint, IIint, 26, gl, rxint, INSERT_VALUES));
328: PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
329: PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
330: PetscCall(MatSetValues(*P, Nsurf, IIsurf, 26, gl, rxint, INSERT_VALUES));
331: PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
332: PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
333: PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
334: PetscCall(PetscFree2(IIint, IIsurf));
336: #if defined(PETSC_USE_DEBUG_foo)
337: {
338: Vec x, y;
339: PetscScalar *yy;
340: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
341: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
342: PetscCall(VecSet(x, 1.0));
343: PetscCall(MatMult(*P, x, y));
344: PetscCall(VecGetArray(y, &yy));
345: for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
346: PetscCall(VecRestoreArray(y, &yy));
347: PetscCall(VecDestroy(&x));
348: PetscCall(VecDestroy(&y));
349: }
350: #endif
352: PetscCall(MatDestroy(&Aii));
353: PetscCall(MatDestroy(&Ais));
354: PetscCall(MatDestroy(&Asi));
355: PetscCall(MatDestroy(&A));
356: PetscCall(ISDestroy(&is));
357: PetscCall(ISDestroy(&isint));
358: PetscCall(ISDestroy(&issurf));
359: PetscCall(MatDestroy(&Xint));
360: PetscCall(MatDestroy(&Xsurf));
361: PetscFunctionReturn(PETSC_SUCCESS);
362: }
364: /*
365: DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
367: */
368: PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
369: {
370: PetscInt dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
371: PetscInt mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[6], *globals, Ng, *IIint, *IIsurf;
372: Mat Xint, Xsurf, Xint_tmp;
373: IS isint, issurf, is, row, col;
374: ISLocalToGlobalMapping ltg;
375: MPI_Comm comm;
376: Mat A, Aii, Ais, Asi, *Aholder, iAii;
377: MatFactorInfo info;
378: PetscScalar *xsurf, *xint;
379: const PetscScalar *rxint;
380: #if defined(PETSC_USE_DEBUG_foo)
381: PetscScalar tmp;
382: #endif
383: PetscHMapI ht;
385: PetscFunctionBegin;
386: PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
387: PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
388: PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
389: PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
390: PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
391: istart = istart ? -1 : 0;
392: jstart = jstart ? -1 : 0;
393: kstart = kstart ? -1 : 0;
395: /*
396: the columns of P are the interpolation of each coarse grid point (one for each face)
397: to all the local degrees of freedom (this includes the vertices, edges and faces).
399: Xint are the subset of the interpolation into the interior
401: Xface are the interpolation onto faces but not into the interior
403: Xsurf are the interpolation onto the vertices and edges (the surfbasket)
405: Symbolically one could write P = [Xint; Xface; Xsurf] (the three blocks stacked vertically) after interchanging the rows to match the natural ordering on the domain
407: */
408: N = (m - istart) * (n - jstart) * (p - kstart);
409: Nint = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
410: Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
411: Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
412: Nsurf = Nface + Nwire;
413: PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 6, NULL, &Xint));
414: PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 6, NULL, &Xsurf));
415: PetscCall(MatDenseGetArray(Xsurf, &xsurf));
417: /*
418: Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
419: Xsurf will be all zero (thus making the coarse matrix singular).
420: */
421: PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
422: PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
423: PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");
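/*
   The 6 columns of Xsurf filled below correspond to the coarse basis functions of the face space:
   one for each of the 6 faces of the local (ghosted) subdomain boundary.
*/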
425: cnt = 0;
426: for (j = 1; j < n - 1 - jstart; j++) {
427: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 0 * Nsurf] = 1;
428: }
430: for (k = 1; k < p - 1 - kstart; k++) {
431: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 1 * Nsurf] = 1;
432: for (j = 1; j < n - 1 - jstart; j++) {
433: xsurf[cnt++ + 2 * Nsurf] = 1;
434: /* these are the interior nodes */
435: xsurf[cnt++ + 3 * Nsurf] = 1;
436: }
437: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
438: }
439: for (j = 1; j < n - 1 - jstart; j++) {
440: for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 5 * Nsurf] = 1;
441: }
443: #if defined(PETSC_USE_DEBUG_foo)
444: for (i = 0; i < Nsurf; i++) {
445: tmp = 0.0;
446: for (j = 0; j < 6; j++) tmp += xsurf[i + j * Nsurf];
448: PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
449: }
450: #endif
451: PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
452: /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/
454: /*
455: II are the indices for all the needed vertices (in global numbering)
456: Iint are the indices for the interior values, Isurf for the surface values
457: (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
458: is NOT the local DMDA ordering.)
459: IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
460: */
461: #define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
462: PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
463: PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
464: for (k = 0; k < p - kstart; k++) {
465: for (j = 0; j < n - jstart; j++) {
466: for (i = 0; i < m - istart; i++) {
467: II[c++] = i + j * mwidth + k * mwidth * nwidth;
469: if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
470: IIint[cint] = i + j * mwidth + k * mwidth * nwidth;
471: Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
472: } else {
473: IIsurf[csurf] = i + j * mwidth + k * mwidth * nwidth;
474: Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
475: }
476: }
477: }
478: }
479: #undef Endpoint
480: PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
481: PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
482: PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
483: PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
484: PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
485: PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
486: PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
487: PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
488: PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
489: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
490: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
491: PetscCall(PetscFree3(II, Iint, Isurf));
493: PetscCall(ISSort(is));
494: PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
495: A = *Aholder;
496: PetscCall(PetscFree(Aholder));
498: PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
499: PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
500: PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));
502: /*
503: Solve for the interpolation onto the interior Xint
504: */
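/* i.e. compute Xint = -inv(Aii) * Ais * Xsurf (the energy-minimizing extension of the boundary values into the subdomain interior), either with a direct factorization of Aii or column by column with a KSP */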
505: PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
506: PetscCall(MatScale(Xint_tmp, -1.0));
508: if (exotic->directSolve) {
509: PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
510: PetscCall(MatFactorInfoInitialize(&info));
511: PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
512: PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
513: PetscCall(ISDestroy(&row));
514: PetscCall(ISDestroy(&col));
515: PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
516: PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
517: PetscCall(MatDestroy(&iAii));
518: } else {
519: Vec b, x;
520: PetscScalar *xint_tmp;
522: PetscCall(MatDenseGetArray(Xint, &xint));
523: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
524: PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
525: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
526: PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
527: for (i = 0; i < 6; i++) {
528: PetscCall(VecPlaceArray(x, xint + i * Nint));
529: PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
530: PetscCall(KSPSolve(exotic->ksp, b, x));
531: PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
532: PetscCall(VecResetArray(x));
533: PetscCall(VecResetArray(b));
534: }
535: PetscCall(MatDenseRestoreArray(Xint, &xint));
536: PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
537: PetscCall(VecDestroy(&x));
538: PetscCall(VecDestroy(&b));
539: }
540: PetscCall(MatDestroy(&Xint_tmp));
542: #if defined(PETSC_USE_DEBUG_foo)
543: PetscCall(MatDenseGetArrayRead(Xint, &rxint));
544: for (i = 0; i < Nint; i++) {
545: tmp = 0.0;
546: for (j = 0; j < 6; j++) tmp += rxint[i + j * Nint];
548: PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
549: }
550: PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
551: /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
552: #endif
554: /* total faces */
555: Ntotal = mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1);
557: /*
558: For each face on process (in the same ordering as used above) determine its local number including ghost points
559: */
560: cnt = 0;
561: {
562: gl[cnt++] = mwidth + 1;
563: }
564: {
{
gl[cnt++] = mwidth * nwidth + 1;
565: }
566: {
567: gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
568: gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
569: }
570: {
571: gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
572: }
573: }
574: {
575: gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
576: }
578: /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
579: /* convert that to global numbering and get them on all processes */
580: PetscCall(ISLocalToGlobalMappingApply(ltg, 6, gl, gl));
581: /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
582: PetscCall(PetscMalloc1(6 * mp * np * pp, &globals));
583: PetscCallMPI(MPI_Allgather(gl, 6, MPIU_INT, globals, 6, MPIU_INT, PetscObjectComm((PetscObject)da)));
585: /* Number the coarse grid points from 0 to Ntotal */
586: PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
587: for (i = 0, cnt = 0; i < 6 * mp * np * pp; i++) {
588: PetscHashIter it = 0;
589: PetscBool missing = PETSC_TRUE;
591: PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
592: if (missing) {
593: ++cnt;
594: PetscCall(PetscHMapIIterSet(ht, it, cnt));
595: }
596: }
597: PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
598: PetscCall(PetscFree(globals));
599: for (i = 0; i < 6; i++) {
600: PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
601: --(gl[i]);
602: }
603: PetscCall(PetscHMapIDestroy(&ht));
604: /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
606: /* construct global interpolation matrix */
607: PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
608: if (reuse == MAT_INITIAL_MATRIX) {
609: PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint, NULL, P));
610: } else {
611: PetscCall(MatZeroEntries(*P));
612: }
613: PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
614: PetscCall(MatDenseGetArrayRead(Xint, &rxint));
615: PetscCall(MatSetValues(*P, Nint, IIint, 6, gl, rxint, INSERT_VALUES));
616: PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
617: PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
618: PetscCall(MatSetValues(*P, Nsurf, IIsurf, 6, gl, rxint, INSERT_VALUES));
619: PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
620: PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
621: PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
622: PetscCall(PetscFree2(IIint, IIsurf));
624: #if defined(PETSC_USE_DEBUG_foo)
625: {
626: Vec x, y;
627: PetscScalar *yy;
628: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
629: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
630: PetscCall(VecSet(x, 1.0));
631: PetscCall(MatMult(*P, x, y));
632: PetscCall(VecGetArray(y, &yy));
633: for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
634: PetscCall(VecRestoreArray(y, &yy));
635: PetscCall(VecDestroy(&x));
636: PetscCall(VecDestroy(&y));
637: }
638: #endif
640: PetscCall(MatDestroy(&Aii));
641: PetscCall(MatDestroy(&Ais));
642: PetscCall(MatDestroy(&Asi));
643: PetscCall(MatDestroy(&A));
644: PetscCall(ISDestroy(&is));
645: PetscCall(ISDestroy(&isint));
646: PetscCall(ISDestroy(&issurf));
647: PetscCall(MatDestroy(&Xint));
648: PetscCall(MatDestroy(&Xsurf));
649: PetscFunctionReturn(PETSC_SUCCESS);
650: }
652: /*@
653: PCExoticSetType - Sets the type of coarse grid interpolation to use
655: Logically Collective
657: Input Parameters:
658: + pc - the preconditioner context
659: - type - either `PC_EXOTIC_FACE` or `PC_EXOTIC_WIREBASKET` (defaults to face)
661: Notes:
662: The face based interpolation has 1 degree of freedom per face and ignores the
663: edge and vertex values completely in the coarse problem. For any seven point
664: stencil the interpolation of a constant on all faces into the interior is that constant.
666: The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
667: per face. A constant on the subdomain boundary is interpolated as that constant
668: in the interior of the domain.
670: The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
671: if A is nonsingular A_c is also nonsingular.
673: Both interpolations are suitable only for scalar problems.
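Example Usage:
A minimal sketch, assuming `pc` is an existing `PC` object; the same choice can be made with the options database key -pc_exotic_type wirebasket
.vb
   PetscCall(PCSetType(pc, PCEXOTIC));
   PetscCall(PCExoticSetType(pc, PC_EXOTIC_WIREBASKET));
.ve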
675: Level: intermediate
677: .seealso: `PCEXOTIC`, `PCExoticType`
678: @*/
679: PetscErrorCode PCExoticSetType(PC pc, PCExoticType type)
680: {
681: PetscFunctionBegin;
682: PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
683: PetscValidLogicalCollectiveEnum(pc, type, 2);
684: PetscTryMethod(pc, "PCExoticSetType_C", (PC, PCExoticType), (pc, type));
685: PetscFunctionReturn(PETSC_SUCCESS);
686: }
688: static PetscErrorCode PCExoticSetType_Exotic(PC pc, PCExoticType type)
689: {
690: PC_MG *mg = (PC_MG *)pc->data;
691: PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;
693: PetscFunctionBegin;
694: ctx->type = type;
695: PetscFunctionReturn(PETSC_SUCCESS);
696: }
698: PetscErrorCode PCSetUp_Exotic(PC pc)
699: {
700: Mat A;
701: PC_MG *mg = (PC_MG *)pc->data;
702: PC_Exotic *ex = (PC_Exotic *)mg->innerctx;
703: MatReuse reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
705: PetscFunctionBegin;
706: PetscCheck(pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Need to call PCSetDM() before using this PC");
707: PetscCall(PCGetOperators(pc, NULL, &A));
708: PetscCheck(ex->type == PC_EXOTIC_FACE || ex->type == PC_EXOTIC_WIREBASKET, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unknown exotic coarse space %d", ex->type);
709: if (ex->type == PC_EXOTIC_FACE) {
710: PetscCall(DMDAGetFaceInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
711: } else /* if (ex->type == PC_EXOTIC_WIREBASKET) */ {
712: PetscCall(DMDAGetWireBasketInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
713: }
714: PetscCall(PCMGSetInterpolation(pc, 1, ex->P));
715: /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
716: PetscCall(PCSetDM(pc, NULL));
717: PetscCall(PCSetUp_MG(pc));
718: PetscFunctionReturn(PETSC_SUCCESS);
719: }
721: PetscErrorCode PCDestroy_Exotic(PC pc)
722: {
723: PC_MG *mg = (PC_MG *)pc->data;
724: PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;
726: PetscFunctionBegin;
727: PetscCall(MatDestroy(&ctx->P));
728: PetscCall(KSPDestroy(&ctx->ksp));
729: PetscCall(PetscFree(ctx));
730: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", NULL));
731: PetscCall(PCDestroy_MG(pc));
732: PetscFunctionReturn(PETSC_SUCCESS);
733: }
735: PetscErrorCode PCView_Exotic(PC pc, PetscViewer viewer)
736: {
737: PC_MG *mg = (PC_MG *)pc->data;
738: PetscBool iascii;
739: PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;
741: PetscFunctionBegin;
742: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
743: if (iascii) {
744: PetscCall(PetscViewerASCIIPrintf(viewer, " Exotic type = %s\n", PCExoticTypes[ctx->type]));
745: if (ctx->directSolve) {
746: PetscCall(PetscViewerASCIIPrintf(viewer, " Using direct solver to construct interpolation\n"));
747: } else {
748: PetscViewer sviewer;
749: PetscMPIInt rank;
751: PetscCall(PetscViewerASCIIPrintf(viewer, " Using iterative solver to construct interpolation\n"));
752: PetscCall(PetscViewerASCIIPushTab(viewer));
753: PetscCall(PetscViewerASCIIPushTab(viewer)); /* should not need to push this twice? */
754: PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
755: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
756: if (rank == 0) PetscCall(KSPView(ctx->ksp, sviewer));
757: PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
758: PetscCall(PetscViewerASCIIPopTab(viewer));
759: PetscCall(PetscViewerASCIIPopTab(viewer));
760: }
761: }
762: PetscCall(PCView_MG(pc, viewer));
763: PetscFunctionReturn(PETSC_SUCCESS);
764: }
766: PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOptionsObject)
767: {
768: PetscBool flg;
769: PC_MG *mg = (PC_MG *)pc->data;
770: PCExoticType mgctype;
771: PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;
773: PetscFunctionBegin;
774: PetscOptionsHeadBegin(PetscOptionsObject, "Exotic coarse space options");
775: PetscCall(PetscOptionsEnum("-pc_exotic_type", "face or wirebasket", "PCExoticSetType", PCExoticTypes, (PetscEnum)ctx->type, (PetscEnum *)&mgctype, &flg));
776: if (flg) PetscCall(PCExoticSetType(pc, mgctype));
777: PetscCall(PetscOptionsBool("-pc_exotic_direct_solver", "use direct solver to construct interpolation", "None", ctx->directSolve, &ctx->directSolve, NULL));
778: if (!ctx->directSolve) {
779: if (!ctx->ksp) {
780: const char *prefix;
781: PetscCall(KSPCreate(PETSC_COMM_SELF, &ctx->ksp));
782: PetscCall(KSPSetErrorIfNotConverged(ctx->ksp, pc->erroriffailure));
783: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->ksp, (PetscObject)pc, 1));
784: PetscCall(PCGetOptionsPrefix(pc, &prefix));
785: PetscCall(KSPSetOptionsPrefix(ctx->ksp, prefix));
786: PetscCall(KSPAppendOptionsPrefix(ctx->ksp, "exotic_"));
787: }
788: PetscCall(KSPSetFromOptions(ctx->ksp));
789: }
790: PetscOptionsHeadEnd();
791: PetscFunctionReturn(PETSC_SUCCESS);
792: }
794: /*MC
795: PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
797: This uses the `PCMG` infrastructure restricted to two levels and the face and wirebasket based coarse
798: grid spaces.
800: Level: advanced
802: Notes:
803: Must be used with `DMDA`
805: By default this uses `KSPGMRES` as the fine grid smoother, so the outer Krylov method should be `KSPFGMRES` or the smoother should be changed to one that does not use `KSPGMRES`
807: References:
808: + * - These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction
809: of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, 1989.
810: . * - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
811: New York University, 1990.
812: . * - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
813: of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions", SIAM Journal on Numerical
814: Analysis, volume 31, 1994. These were developed in the context of iterative substructuring preconditioners.
815: . * - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
816: They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
817: Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
818: Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
819: of the 17th International Conference on Domain Decomposition Methods in
820: Science and Engineering, held in Strobl, Austria, 2006, number 60 in
821: Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
822: . * - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
823: Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
824: of the 17th International Conference on Domain Decomposition Methods
825: in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
826: Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007
827: . * - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
828: for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
829: Numer. Anal., 46(4), 2008.
830: - * - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
831: algorithm for almost incompressible elasticity. Technical Report
832: TR2008-912, Department of Computer Science, Courant Institute
833: of Mathematical Sciences, New York University, May 2008. URL:
835: The usual `PCMG` options are supported, such as `-mg_levels_pc_type <type>`, `-mg_coarse_pc_type <type>`, and `-pc_mg_type <type>`
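Example Usage:
A sketch of typical command line options, assuming the `PC` has no options prefix (option names are taken from the code in this file)
.vb
   -pc_type exotic -pc_exotic_type wirebasket -exotic_ksp_type preonly -exotic_pc_type lu
.ve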
837: .seealso: `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()`
838: M*/
840: PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
841: {
842: PC_Exotic *ex;
843: PC_MG *mg;
845: PetscFunctionBegin;
846: /* if the type was previously mg, we must manually destroy it because the call to PCSetType(pc,PCMG) will not destroy it */
847: PetscTryTypeMethod(pc, destroy);
848: pc->data = NULL;
850: PetscCall(PetscFree(((PetscObject)pc)->type_name));
851: ((PetscObject)pc)->type_name = NULL;
853: PetscCall(PCSetType(pc, PCMG));
854: PetscCall(PCMGSetLevels(pc, 2, NULL));
855: PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
856: PetscCall(PetscNew(&ex));
857: ex->type = PC_EXOTIC_FACE;
858: mg = (PC_MG *)pc->data;
859: mg->innerctx = ex;
861: pc->ops->setfromoptions = PCSetFromOptions_Exotic;
862: pc->ops->view = PCView_Exotic;
863: pc->ops->destroy = PCDestroy_Exotic;
864: pc->ops->setup = PCSetUp_Exotic;
866: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", PCExoticSetType_Exotic));
867: PetscFunctionReturn(PETSC_SUCCESS);
868: }