Actual source code: pcis.c


#include <petsc/private/pcisimpl.h>

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  pcis->use_stiffness_scaling = use;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCISSetUseStiffnessScaling - Tells `PCIS` to construct the partition of unity
   using the diagonal entries of the local matrices

   Logically Collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether to use the matrix diagonal to build the partition of unity

   Level: intermediate

   Developer Note:
   There is no manual page for `PCIS` or for some of its methods

.seealso: `PCIS`, `PCBDDC`
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}
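
/*
   A minimal usage sketch (hypothetical helper, not part of PETSc): `PCIS` is a
   support class, so stiffness scaling is normally requested through a PCIS-based
   preconditioner such as `PCBDDC`; `PetscTryMethod` above makes the call a
   harmless no-op for any other `PC` type.
*/
static PetscErrorCode ExampleUseStiffnessScaling(KSP ksp)
{
  PC pc;

  PetscFunctionBegin;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCBDDC));                      /* a PCIS-based preconditioner */
  PetscCall(PCISSetUseStiffnessScaling(pc, PETSC_TRUE)); /* scale by the local matrix diagonal */
  PetscFunctionReturn(PETSC_SUCCESS);
}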

static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCISSetSubdomainDiagonalScaling - Set diagonal scaling for `PCIS`.

   Logically Collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Note:
   Intended for problems with jumping coefficients.

   Developer Note:
   There is no manual page for `PCIS` or for some of its methods

.seealso: `PCIS`, `PCBDDC`
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(PETSC_SUCCESS);
}
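
/*
   A minimal usage sketch (hypothetical helper, not part of PETSc): build a local
   scaling vector, e.g. holding each subdomain's coefficient, and hand it to `PCIS`.
   The setter takes its own reference, and accepts either the full local size n or
   the interface size n_B (checked above once the PC is set up).
*/
static PetscErrorCode ExampleSetDiagonalScaling(PC pc, PetscInt n_local, PetscScalar coeff)
{
  Vec scaling;

  PetscFunctionBegin;
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, n_local, &scaling)); /* one entry per local node */
  PetscCall(VecSet(scaling, coeff));                           /* e.g. the subdomain's diffusion coefficient */
  PetscCall(PCISSetSubdomainDiagonalScaling(pc, scaling));
  PetscCall(VecDestroy(&scaling));                             /* PCIS keeps its own reference */
  PetscFunctionReturn(PETSC_SUCCESS);
}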

static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  pcis->scaling_factor = scal;
  if (pcis->D) PetscCall(VecSet(pcis->D, pcis->scaling_factor));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PCISSetSubdomainScalingFactor - Set the scaling factor for `PCIS`.

   Not Collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Note:
   Intended for problems with jumping coefficients.

   Developer Note:
   There is no manual page for `PCIS` or for some of its methods

.seealso: `PCIS`, `PCBDDC`
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(PETSC_SUCCESS);
}
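
/*
   A minimal usage sketch (hypothetical helper, not part of PETSc): a constant
   factor fills the existing scaling vector D, so with the default value of 1.0
   the normalization in PCISSetUp() reduces to plain multiplicity scaling.
*/
static PetscErrorCode ExampleSetScalingFactor(PC pc, PetscScalar coeff)
{
  PetscFunctionBegin;
  PetscCall(PCISSetSubdomainScalingFactor(pc, coeff)); /* e.g. the subdomain's coefficient */
  PetscFunctionReturn(PETSC_SUCCESS);
}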

PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS    *pcis = (PC_IS *)(pc->data);
  Mat_IS   *matis;
  MatReuse  reuse;
  PetscBool flg, issbaij;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires preconditioning matrix of type MATIS");
  matis = (Mat_IS *)pc->pmat->data;
  if (pc->useAmat) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt  n_I;
    PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global;
    PetscBT   bt;
    PetscInt  i, j;

    /* get info on mapping */
    PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
    pcis->mapping = matis->rmapping;
    PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
    PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));

    /* Identifying interior and interface nodes, in local numbering */
    PetscCall(PetscBTCreate(pcis->n, &bt));
    for (i = 0; i < pcis->n_neigh; i++)
      for (j = 0; j < pcis->n_shared[i]; j++) PetscCall(PetscBTSet(bt, pcis->shared[i][j]));

    /* Creating local and global index sets for interior and interface nodes */
    PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
    PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
    for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
      if (!PetscBTLookup(bt, i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* reuse the vacant tail of the other array to avoid an extra allocation */
    idx_I_global = idx_B_local + pcis->n_B;
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));

    /* Creating the index sets */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));

    /* Freeing memory */
    PetscCall(PetscFree(idx_B_local));
    PetscCall(PetscFree(idx_I_local));
    PetscCall(PetscBTDestroy(&bt));

    /* Creating work vectors and arrays */
    PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
    PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
    PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
    PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
    PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
    PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecSet(pcis->D, pcis->scaling_factor));
    }

    /* Creating the scatter contexts */
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));

    /* map from boundary to local */
    PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
  }

  {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
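  /*
    These blocks define the local interface Schur complement

        S = A_BB - A_BI * inv(A_II) * A_IB,

    which PCISApplySchur() below applies matrix-free: it solves with A_II
    (the ksp_D solver) instead of forming inv(A_II) explicitly.
  */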
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs, ibs;

    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDestroy(&pcis->A_II));
      PetscCall(MatDestroy(&pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_IB));
      PetscCall(MatDestroy(&pcis->A_BI));
      PetscCall(MatDestroy(&pcis->A_BB));
    }

    PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
    PetscCall(MatGetBlockSize(matis->A, &bs));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
    if (amat) {
      Mat_IS *amatis = (Mat_IS *)pc->mat->data;
      PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
    } else {
      PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_II));
      pcis->A_II = pcis->pA_II;
    }
    PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
    PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
    PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
    } else {
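      /* MATSEQSBAIJ stores only one triangle of the symmetric matrix, so the
         unsymmetric off-diagonal blocks A_IB and A_BI cannot be extracted from
         it directly; convert to full block (BAIJ) storage first */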
      Mat newmat;

      PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
      PetscCall(MatDestroy(&newmat));
    }
    PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
  }

  /* Filling the scaling vector D */
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt     i, n;

    if (pcis->A_BB) {
      PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
    } else {
      PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
    }
    PetscCall(VecAbs(pcis->D));
    PetscCall(VecGetLocalSize(pcis->D, &n));
    PetscCall(VecGetArray(pcis->D, &a));
    for (i = 0; i < n; i++)
      if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
    PetscCall(VecRestoreArray(pcis->D, &a));
  }
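  /* Normalize D into a partition of unity on the interface: accumulate, at each
     interface node, the contributions of all subdomains sharing it (ADD_VALUES in
     SCATTER_REVERSE), bring the sums back to the local interface vector, and
     divide, so that the D entries of the sharing subdomains sum to 1 at every
     interface node. */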
  PetscCall(VecSet(pcis->vec1_global, 0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
    PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_D));
    /* factor the local Dirichlet solver now, so that errors surface during PCSetUp rather than in the first solve */
    PetscCall(KSPSetUp(pcis->ksp_D));
    /* Neumann */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
    PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_N));
    {
      PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor, floating_factor;

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
      if (!damp_fixed) fixed_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));

      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, fixed_factor)); /* the fixed-subdomain factor read above, not floating_factor */
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      }
    }
    /* factor the local Neumann solver now, so that errors surface during PCSetUp rather than in the first solve */
    PetscCall(KSPSetUp(pcis->ksp_N));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCISDestroy - Frees the memory and the objects created by PCISCreate() and PCISSetUp()
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis;

  PetscFunctionBegin;
  if (!pc) PetscFunctionReturn(PETSC_SUCCESS);
  pcis = (PC_IS *)(pc->data);
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  if (pcis->n_neigh > -1) PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCISCreate - Creates the PC_IS context (if not already present), sets its defaults, and composes the PCIS setter methods
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  if (!pcis) {
    PetscCall(PetscNew(&pcis));
    pc->data = pcis;
  }
  pcis->n_neigh          = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCISApplySchur - Applies the local interface Schur complement
                    S = A_BB - A_BI * inv(A_II) * A_IB to a vector

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of the Schur complement applied to v
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)
*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  if (!vec2_B) vec2_B = v;

  PetscCall(MatMult(pcis->A_BB, v, vec1_B));
  PetscCall(MatMult(pcis->A_IB, v, vec1_D));
  PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D));
  PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
  PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));
  PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior and interface,
   including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice versa, when in SCATTER_REVERSE
   mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] vector to be scattered into the array

   Output parameters:
.  array_N - [when in SCATTER_REVERSE mode] array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] vector to receive the scattered array

   Note:
   The entries in the array that do not correspond to interface nodes remain unaltered.
*/
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt        i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  PetscCall(VecGetArray(v_B, &array_B));
  PetscCall(ISGetIndices(pcis->is_B_local, &idex));

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i = 0; i < pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else { /* ADD_VALUES */
      for (i = 0; i < pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else { /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else { /* ADD_VALUES */
      for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  PetscCall(ISRestoreIndices(pcis->is_B_local, &idex));
  PetscCall(VecRestoreArray(v_B, &array_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
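
/*
   A minimal usage sketch (hypothetical fragment, not part of PETSc): copy the
   interface entries of the full-size work array into an interface vector, the
   way a PCIS-based preconditioner would use its work_N array:

     PetscCall(PCISScatterArrayNToVecB(pcis->work_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD, pc));
*/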

/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. The inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N */
  PetscCall(VecSet(vec1_N, 0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &flg, NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));

      PetscCall(VecSum(vec1_N, &average));
      average = average / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is fixed.    Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
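
/*
   Why the Neumann solve above applies inv(S) (a short derivation, not part of
   the original source): the first block row gives x_I = -inv(A_II) * A_IB * x,
   and substituting it into the second block row yields

       (A_BB - A_BI * inv(A_II) * A_IB) x = S x = b,

   so the interface part x of the Neumann solution is exactly inv(S) * b.
*/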