Actual source code: scotch.c


#include <../src/mat/impls/adj/mpi/mpiadj.h>

EXTERN_C_BEGIN
#include <ptscotch.h>
#if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
/* we define the prototype instead of including SCOTCH's parmetis.h */
void SCOTCH_ParMETIS_V3_NodeND(const SCOTCH_Num *const, SCOTCH_Num *const, SCOTCH_Num *const, const SCOTCH_Num *const, const SCOTCH_Num *const, SCOTCH_Num *const, SCOTCH_Num *const, MPI_Comm *);
#endif
EXTERN_C_END

typedef struct {
  double     imbalance;
  SCOTCH_Num strategy;
} MatPartitioning_PTScotch;

/*@
   MatPartitioningPTScotchSetImbalance - Sets the value of the load imbalance
   ratio to be used during strategy selection.

   Collective

   Input Parameters:
+  part - the partitioning context
-  imb  - the load imbalance ratio

   Options Database Key:
.  -mat_partitioning_ptscotch_imbalance <imb> - set load imbalance ratio

   Note:
   Must be in the range [0,1]. The default value is 0.01.
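
   Example Usage (a minimal sketch; `part` is assumed to be a `MatPartitioning` of type `MATPARTITIONINGPTSCOTCH`):
.vb
     MatPartitioningPTScotchSetImbalance(part, 0.05); /* allow up to 5% load imbalance */
.ve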

   Level: advanced

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetStrategy()`, `MatPartitioningPTScotchGetImbalance()`
@*/
PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning part, PetscReal imb)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, imb, 2);
  PetscTryMethod(part, "MatPartitioningPTScotchSetImbalance_C", (MatPartitioning, PetscReal), (part, imb));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningPTScotchSetImbalance_PTScotch(MatPartitioning part, PetscReal imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  if (imb == PETSC_DEFAULT) scotch->imbalance = 0.01;
  else {
    PetscCheck(imb >= 0.0 && imb <= 1.0, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Illegal value of imb. Must be in range [0,1]");
    scotch->imbalance = (double)imb;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningPTScotchGetImbalance - Gets the value of the load imbalance
   ratio used during strategy selection.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  imb  - the load imbalance ratio

   Level: advanced

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetImbalance()`
@*/
PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning part, PetscReal *imb)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(imb, 2);
  PetscUseMethod(part, "MatPartitioningPTScotchGetImbalance_C", (MatPartitioning, PetscReal *), (part, imb));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningPTScotchGetImbalance_PTScotch(MatPartitioning part, PetscReal *imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  *imb = scotch->imbalance;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningPTScotchSetStrategy - Sets the strategy to be used in PTScotch.

   Collective

   Input Parameters:
+  part - the partitioning context
-  strategy - the strategy, one of
.vb
     MP_PTSCOTCH_DEFAULT     - Default behavior
     MP_PTSCOTCH_QUALITY     - Prioritize quality over speed
     MP_PTSCOTCH_SPEED       - Prioritize speed over quality
     MP_PTSCOTCH_BALANCE     - Enforce load balance
     MP_PTSCOTCH_SAFETY      - Avoid methods that may fail
     MP_PTSCOTCH_SCALABILITY - Favor scalability as much as possible
.ve

   Options Database Key:
.  -mat_partitioning_ptscotch_strategy [default,quality,speed,balance,safety,scalability] - strategy

   Level: advanced

   Note:
   The default is `MP_PTSCOTCH_DEFAULT`, which lets PTScotch choose. See the PTScotch documentation for more information.
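
   Example Usage (a minimal sketch; `part` is assumed to be a `MatPartitioning` of type `MATPARTITIONINGPTSCOTCH`):
.vb
     MatPartitioningPTScotchSetStrategy(part, MP_PTSCOTCH_SPEED);
.ve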

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetImbalance()`, `MatPartitioningPTScotchGetStrategy()`
@*/
PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning part, MPPTScotchStrategyType strategy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, strategy, 2);
  PetscTryMethod(part, "MatPartitioningPTScotchSetStrategy_C", (MatPartitioning, MPPTScotchStrategyType), (part, strategy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningPTScotchSetStrategy_PTScotch(MatPartitioning part, MPPTScotchStrategyType strategy)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  switch (strategy) {
  case MP_PTSCOTCH_QUALITY:
    scotch->strategy = SCOTCH_STRATQUALITY;
    break;
  case MP_PTSCOTCH_SPEED:
    scotch->strategy = SCOTCH_STRATSPEED;
    break;
  case MP_PTSCOTCH_BALANCE:
    scotch->strategy = SCOTCH_STRATBALANCE;
    break;
  case MP_PTSCOTCH_SAFETY:
    scotch->strategy = SCOTCH_STRATSAFETY;
    break;
  case MP_PTSCOTCH_SCALABILITY:
    scotch->strategy = SCOTCH_STRATSCALABILITY;
    break;
  default:
    scotch->strategy = SCOTCH_STRATDEFAULT;
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningPTScotchGetStrategy - Gets the strategy used in PTScotch.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  strategy - the strategy

   Level: advanced

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetStrategy()`
@*/
PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning part, MPPTScotchStrategyType *strategy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(strategy, 2);
  PetscUseMethod(part, "MatPartitioningPTScotchGetStrategy_C", (MatPartitioning, MPPTScotchStrategyType *), (part, strategy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningPTScotchGetStrategy_PTScotch(MatPartitioning part, MPPTScotchStrategyType *strategy)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  switch (scotch->strategy) {
  case SCOTCH_STRATQUALITY:
    *strategy = MP_PTSCOTCH_QUALITY;
    break;
  case SCOTCH_STRATSPEED:
    *strategy = MP_PTSCOTCH_SPEED;
    break;
  case SCOTCH_STRATBALANCE:
    *strategy = MP_PTSCOTCH_BALANCE;
    break;
  case SCOTCH_STRATSAFETY:
    *strategy = MP_PTSCOTCH_SAFETY;
    break;
  case SCOTCH_STRATSCALABILITY:
    *strategy = MP_PTSCOTCH_SCALABILITY;
    break;
  default:
    *strategy = MP_PTSCOTCH_DEFAULT;
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningView_PTScotch(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  PetscBool                 isascii;
  const char               *str = NULL;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    switch (scotch->strategy) {
    case SCOTCH_STRATQUALITY:
      str = "Prioritize quality over speed";
      break;
    case SCOTCH_STRATSPEED:
      str = "Prioritize speed over quality";
      break;
    case SCOTCH_STRATBALANCE:
      str = "Enforce load balance";
      break;
    case SCOTCH_STRATSAFETY:
      str = "Avoid methods that may fail";
      break;
    case SCOTCH_STRATSCALABILITY:
      str = "Favor scalability as much as possible";
      break;
    default:
      str = "Default behavior";
      break;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Strategy=%s\n", str));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Load imbalance ratio=%g\n", scotch->imbalance));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningSetFromOptions_PTScotch(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  PetscBool                 flag;
  PetscReal                 r;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  MPPTScotchStrategyType    strat;

  PetscFunctionBegin;
  PetscCall(MatPartitioningPTScotchGetStrategy(part, &strat));
  PetscOptionsHeadBegin(PetscOptionsObject, "PTScotch partitioning options");
  PetscCall(PetscOptionsEnum("-mat_partitioning_ptscotch_strategy", "Strategy", "MatPartitioningPTScotchSetStrategy", MPPTScotchStrategyTypes, (PetscEnum)strat, (PetscEnum *)&strat, &flag));
  if (flag) PetscCall(MatPartitioningPTScotchSetStrategy(part, strat));
  PetscCall(PetscOptionsReal("-mat_partitioning_ptscotch_imbalance", "Load imbalance ratio", "MatPartitioningPTScotchSetImbalance", scotch->imbalance, &r, &flag));
  if (flag) PetscCall(MatPartitioningPTScotchSetImbalance(part, r));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningApply_PTScotch_Private(MatPartitioning part, PetscBool useND, IS *partitioning)
{
  MPI_Comm                  pcomm, comm;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  PetscMPIInt               rank;
  Mat                       mat = part->adj;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mat->data;
  PetscBool                 flg, distributed;
  PetscBool                 proc_weight_flg;
  PetscInt                  i, j, p, bs = 1, nold;
  PetscInt                 *NDorder = NULL;
  PetscReal                *vwgttab, deltval;
  SCOTCH_Num               *locals, *velotab, *veloloctab, *edloloctab, vertlocnbr, edgelocnbr, nparts = part->n;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &pcomm));
  /* Duplicate the communicator to be sure that PTSCOTCH attribute caching does not interfere with PETSc. */
  PetscCallMPI(MPI_Comm_dup(pcomm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  if (!flg) {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    nold = mat->rmap->n;
    PetscCall(MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &mat));
    if (mat->rmap->n > 0) bs = nold / mat->rmap->n;
    adj = (Mat_MPIAdj *)mat->data;
  }

  proc_weight_flg = part->part_weights ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-mat_partitioning_ptscotch_proc_weight", &proc_weight_flg, NULL));

  PetscCall(PetscMalloc1(mat->rmap->n + 1, &locals));

  if (useND) {
#if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
    PetscInt   *sizes, *seps, log2size, subd, *level, base = 0;
    PetscMPIInt size;

    PetscCallMPI(MPI_Comm_size(comm, &size));
    log2size = PetscLog2Real(size);
    subd     = PetscPowInt(2, log2size);
    PetscCheck(subd == size, comm, PETSC_ERR_SUP, "Only power of 2 communicator sizes");
    PetscCall(PetscMalloc1(mat->rmap->n, &NDorder));
    PetscCall(PetscMalloc3(2 * size, &sizes, 4 * size, &seps, size, &level));
    SCOTCH_ParMETIS_V3_NodeND(mat->rmap->range, adj->i, adj->j, &base, NULL, NDorder, sizes, &comm);
    PetscCall(MatPartitioningSizesToSep_Private(subd, sizes, seps, level));
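    /* Translate each local row's ND ordering value: rows that fall inside a
       subdomain interval get that subdomain's number, while rows that fall on
       a separator are assigned through the separator's level */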
    for (i = 0; i < mat->rmap->n; i++) {
      PetscInt loc;

      PetscCall(PetscFindInt(NDorder[i], 2 * subd, seps, &loc));
      if (loc < 0) {
        loc = -(loc + 1);
        if (loc % 2) { /* part of subdomain */
          locals[i] = loc / 2;
        } else {
          PetscCall(PetscFindInt(NDorder[i], 2 * (subd - 1), seps + 2 * subd, &loc));
          loc       = loc < 0 ? -(loc + 1) / 2 : loc / 2;
          locals[i] = level[loc];
        }
      } else locals[i] = loc / 2;
    }
    PetscCall(PetscFree3(sizes, seps, level));
#else
    SETERRQ(pcomm, PETSC_ERR_SUP, "Need libptscotchparmetis.a compiled with -DSCOTCH_METIS_PREFIX");
#endif
  } else {
    velotab = NULL;
    if (proc_weight_flg) {
      PetscCall(PetscMalloc1(nparts, &vwgttab));
      PetscCall(PetscMalloc1(nparts, &velotab));
      for (j = 0; j < nparts; j++) {
        if (part->part_weights) vwgttab[j] = part->part_weights[j] * nparts;
        else vwgttab[j] = 1.0;
      }
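      /* rescale the relative weights so that they lie close to integers, since
         they are rounded into the integral target-architecture weights below */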
      for (i = 0; i < nparts; i++) {
        deltval = PetscAbsReal(vwgttab[i] - PetscFloorReal(vwgttab[i] + 0.5));
        if (deltval > 0.01) {
          for (j = 0; j < nparts; j++) vwgttab[j] /= deltval;
        }
      }
      for (i = 0; i < nparts; i++) velotab[i] = (SCOTCH_Num)(vwgttab[i] + 0.5);
      PetscCall(PetscFree(vwgttab));
    }

    vertlocnbr = mat->rmap->range[rank + 1] - mat->rmap->range[rank];
    edgelocnbr = adj->i[vertlocnbr];
    veloloctab = part->vertex_weights;
    edloloctab = part->use_edge_weights ? adj->values : NULL;

    /* detect whether all vertices are located at the same process in the original graph */
    for (p = 0; p < nparts && !mat->rmap->range[p + 1]; ++p)
      ;
    distributed = (mat->rmap->range[p + 1] == mat->rmap->N) ? PETSC_FALSE : PETSC_TRUE;
    if (distributed) {
      SCOTCH_Arch     archdat;
      SCOTCH_Dgraph   grafdat;
      SCOTCH_Dmapping mappdat;
      SCOTCH_Strat    stradat;

      PetscCallExternal(SCOTCH_dgraphInit, &grafdat, comm);
      PetscCallExternal(SCOTCH_dgraphBuild, &grafdat, 0, vertlocnbr, vertlocnbr, adj->i, adj->i + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adj->j, NULL, edloloctab);

      if (PetscDefined(USE_DEBUG)) PetscCallExternal(SCOTCH_dgraphCheck, &grafdat);

      PetscCallExternal(SCOTCH_archInit, &archdat);
      PetscCallExternal(SCOTCH_stratInit, &stradat);
      PetscCallExternal(SCOTCH_stratDgraphMapBuild, &stradat, scotch->strategy, nparts, nparts, scotch->imbalance);

      if (velotab) {
        PetscCallExternal(SCOTCH_archCmpltw, &archdat, nparts, velotab);
      } else {
        PetscCallExternal(SCOTCH_archCmplt, &archdat, nparts);
      }
      PetscCallExternal(SCOTCH_dgraphMapInit, &grafdat, &mappdat, &archdat, locals);
      PetscCallExternal(SCOTCH_dgraphMapCompute, &grafdat, &mappdat, &stradat);

      SCOTCH_dgraphMapExit(&grafdat, &mappdat);
      SCOTCH_archExit(&archdat);
      SCOTCH_stratExit(&stradat);
      SCOTCH_dgraphExit(&grafdat);

    } else if (rank == p) {
      SCOTCH_Graph grafdat;
      SCOTCH_Strat stradat;

      PetscCallExternal(SCOTCH_graphInit, &grafdat);
      PetscCallExternal(SCOTCH_graphBuild, &grafdat, 0, vertlocnbr, adj->i, adj->i + 1, veloloctab, NULL, edgelocnbr, adj->j, edloloctab);
      if (PetscDefined(USE_DEBUG)) PetscCallExternal(SCOTCH_graphCheck, &grafdat);
      PetscCallExternal(SCOTCH_stratInit, &stradat);
      PetscCallExternal(SCOTCH_stratGraphMapBuild, &stradat, scotch->strategy, nparts, scotch->imbalance);
      if (velotab) {
        SCOTCH_Arch archdat;
        PetscCallExternal(SCOTCH_archInit, &archdat);
        PetscCallExternal(SCOTCH_archCmpltw, &archdat, nparts, velotab);
        PetscCallExternal(SCOTCH_graphMap, &grafdat, &archdat, &stradat, locals);
        SCOTCH_archExit(&archdat);
      } else {
        PetscCallExternal(SCOTCH_graphPart, &grafdat, nparts, &stradat, locals);
      }
      SCOTCH_stratExit(&stradat);
      SCOTCH_graphExit(&grafdat);
    }

    PetscCall(PetscFree(velotab));
  }
  PetscCallMPI(MPI_Comm_free(&comm));

  if (bs > 1) {
    PetscInt *newlocals;
    PetscCall(PetscMalloc1(bs * mat->rmap->n, &newlocals));
    for (i = 0; i < mat->rmap->n; i++) {
      for (j = 0; j < bs; j++) newlocals[bs * i + j] = locals[i];
    }
    PetscCall(PetscFree(locals));
    PetscCall(ISCreateGeneral(pcomm, bs * mat->rmap->n, newlocals, PETSC_OWN_POINTER, partitioning));
  } else {
    PetscCall(ISCreateGeneral(pcomm, mat->rmap->n, locals, PETSC_OWN_POINTER, partitioning));
  }
  if (useND) {
    IS ndis;

    if (bs > 1) {
      PetscCall(ISCreateBlock(pcomm, bs, mat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
    } else {
      PetscCall(ISCreateGeneral(pcomm, mat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
    }
    PetscCall(ISSetPermutation(ndis));
    PetscCall(PetscObjectCompose((PetscObject)(*partitioning), "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
    PetscCall(ISDestroy(&ndis));
  }

  if (!flg) PetscCall(MatDestroy(&mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningApply_PTScotch(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_PTScotch_Private(part, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningApplyND_PTScotch(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_PTScotch_Private(part, PETSC_TRUE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatPartitioningDestroy_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(scotch));
  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetImbalance_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetImbalance_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetStrategy_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetStrategy_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   MATPARTITIONINGPTSCOTCH - Creates a partitioning context that uses the external package SCOTCH.

   Level: beginner

   Note:
   See http://www.labri.fr/perso/pelegrin/scotch/
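
   Example Usage (a minimal sketch; `A` is assumed to be a parallel matrix whose rows are to be partitioned):
.vb
     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD, &part);
     MatPartitioningSetAdjacency(part, A);
     MatPartitioningSetType(part, MATPARTITIONINGPTSCOTCH);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part, &is);
.ve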

.seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningPTScotchSetImbalance()`, `MatPartitioningPTScotchGetImbalance()`,
          `MatPartitioningPTScotchSetStrategy()`, `MatPartitioningPTScotchGetStrategy()`
M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch;

  PetscFunctionBegin;
  PetscCall(PetscNew(&scotch));
  part->data = (void *)scotch;

  scotch->imbalance = 0.01;
  scotch->strategy  = SCOTCH_STRATDEFAULT;

  part->ops->apply          = MatPartitioningApply_PTScotch;
  part->ops->applynd        = MatPartitioningApplyND_PTScotch;
  part->ops->view           = MatPartitioningView_PTScotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_PTScotch;
  part->ops->destroy        = MatPartitioningDestroy_PTScotch;

  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetImbalance_C", MatPartitioningPTScotchSetImbalance_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetImbalance_C", MatPartitioningPTScotchGetImbalance_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetStrategy_C", MatPartitioningPTScotchSetStrategy_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetStrategy_C", MatPartitioningPTScotchGetStrategy_PTScotch));
  PetscFunctionReturn(PETSC_SUCCESS);
}