Actual source code: redundant.c
/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>

typedef struct {
  KSP                ksp;
  PC                 pc;                    /* actual preconditioner used on each processor */
  Vec                xsub, ysub;            /* vectors of a subcommunicator that hold the parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;            /* parallel vectors that concatenate xsub or ysub to facilitate vector scattering */
  Mat                pmats;                 /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin, scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;              /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
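/*
  Data flow in PCApply_Redundant()/PCApplyTranspose_Redundant():

      x --scatterin--> xdup --VecPlaceArray--> xsub --KSPSolve--> ysub --VecPlaceArray--> ydup --scatterout--> y

  xdup holds nsubcomm concatenated copies of x, so every subcommunicator solves the full problem on its own
  copy (xsub); scatterout then copies each process's owned entries back from its subgroup's copy into y.
*/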
static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;

    PetscCall(KSPGetPC(red->ksp, &pc));
    PetscCall(PCFactorSetShiftType(pc, shifttype));
  } else {
    /* the inner KSP does not exist yet; remember the shift type so PCRedundantGetKSP_Redundant() can apply it */
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
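/*
  A minimal usage sketch (illustrative only, not part of the PETSc build): a shift type set on the outer
  PCREDUNDANT `pc` is forwarded to the inner factorization PC by PCFactorSetShiftType_Redundant(), or
  stored until that inner PC is created. The PC `pc` is assumed to have been created by the caller.
*/
#if 0
static PetscErrorCode ExampleForwardShiftType(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCSetType(pc, PCREDUNDANT));
  PetscCall(PCFactorSetShiftType(pc, MAT_SHIFT_NONZERO)); /* reaches the inner PCLU/PCCHOLESKY via the composed hook */
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif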
static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if the pmat provided by the user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, and the new ksp and pc over the subcommunicator */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;

        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
         xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
         ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
         Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
        /* each process sends its owned entries [mstart,mend) to every copy k of the solution, at offset k*M within xdup */
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
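        /* Worked example (illustrative): with 2 ranks, global size M = 4 (rank 0 owns rows 0-1, rank 1 owns
           rows 2-3) and n = 2 subcommunicators, rank 0 builds idx1 = {0,1,0,1}, idx2 = {0,1,4,5} and rank 1
           builds idx1 = {2,3,2,3}, idx2 = {2,3,6,7}, so xdup (length n*M = 8) holds two full copies of x. */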
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

  Logically Collective

  Input Parameters:
+ pc         - the preconditioner context
- nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
               use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.

  Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}
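/*
  A minimal usage sketch (illustrative only, not part of the PETSc build): configure a KSP's preconditioner
  as PCREDUNDANT with 4 redundant subgroups. The KSP `ksp` is assumed to have been created on a communicator
  with at least 4 processes and to have its operators set by the caller.
*/
#if 0
static PetscErrorCode ExampleSetRedundantNumber(KSP ksp)
{
  PC pc;

  PetscFunctionBegin;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCREDUNDANT));
  PetscCall(PCRedundantSetNumber(pc, 4)); /* 4 subcommunicators, each solving the full problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif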
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));
  red->scatterin = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  PCRedundantSetScatter - Sets the scatter used to copy values into the
  redundant local solve and the scatter to move them back into the global
  vector.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - the scatter to move the values in
- out - the scatter to move them out

  Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new KSP and PC of which the processes in each subcommunicator have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. innerksp - the `KSP` on the smaller set of processes

  Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
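/*
  A minimal usage sketch (illustrative only, not part of the PETSc build): retrieve the inner KSP of a
  PCREDUNDANT preconditioner and replace its default direct solve with a few GMRES/block-Jacobi iterations.
  The PC `pc` is assumed to already have type PCREDUNDANT.
*/
#if 0
static PetscErrorCode ExampleCustomizeInnerKSP(PC pc)
{
  KSP innerksp;
  PC  innerpc;

  PetscFunctionBegin;
  PetscCall(PCRedundantGetKSP(pc, &innerksp));
  PetscCall(KSPSetType(innerksp, KSPGMRES));
  PetscCall(KSPSetTolerances(innerksp, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  PetscCall(KSPGetPC(innerksp, &innerpc));
  PetscCall(PCSetType(innerpc, PCBJACOBI)); /* works whether the subcommunicator has one process or several */
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif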
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantGetOperators - Gets the sequential matrix and preconditioner matrix

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ mat  - the matrix
- pmat - the (possibly different) preconditioner matrix

  Level: advanced

.seealso: `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
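/*
  A minimal usage sketch (illustrative only, not part of the PETSc build): after KSPSetUp(), inspect the
  redundant matrix that each subcommunicator solves with. `pc` is assumed to be a set-up PCREDUNDANT.
*/
#if 0
static PetscErrorCode ExampleInspectRedundantOperators(PC pc)
{
  Mat      A_red;
  PetscInt m, n;

  PetscFunctionBegin;
  PetscCall(PCRedundantGetOperators(pc, &A_red, NULL));
  PetscCall(MatGetSize(A_red, &m, &n)); /* the global size matches that of the original matrix */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "redundant matrix is %" PetscInt_FMT " x %" PetscInt_FMT "\n", m, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif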
/*MC
   PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processors

   Options Database Key:
.  -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
                              use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.

   Level: intermediate

   Notes:
   Options for the redundant preconditioners can be set using the options database prefix -redundant_

   The default `KSP` is preonly and the default `PC` is `PCLU` or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`.

   `PCFactorSetShiftType()` applied to this `PC` will convey the shift type into the inner `PC` if it is factorization based.

   Developer Note:
   `PCSetInitialGuessNonzero()` is not used by this class but likely should be.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
M*/
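/*
  A command-line usage sketch (illustrative): assuming an application that calls KSPSetFromOptions(), the
  following options select this preconditioner, split the processes into 4 subgroups, and choose the solver
  used redundantly within each subgroup via the documented -redundant_ prefix:

      -pc_type redundant -pc_redundant_number 4 -redundant_ksp_type preonly -redundant_pc_type lu
*/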
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}