Actual source code: partition.c


#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  if (part->n != size) {
    const char *prefix;
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
    SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
  }
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));

  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  /* an IS with every local entry equal to this process' rank (stride 0): each row keeps its current owner */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Partitions the rows as evenly as possible across the processes, ignoring the matrix structure,
   to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
{
  PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

  PetscFunctionBegin;
  PetscCall(MatGetSize(part->adj, &M, NULL));
  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  nparts = part->n;
  PetscCall(PetscMalloc1(nparts, &parts));
  d = M / nparts;
  for (i = 0; i < nparts; i++) parts[i] = d;
  r = M % nparts;
  for (i = 0; i < r; i++) parts[i] += 1;
  /* turn the part sizes into a prefix sum of (exclusive) ending offsets */
  for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
  PetscCall(PetscMalloc1(m, &indices));
  PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
  /* locate each owned row in the prefix sum to find its target part */
  for (i = start; i < end; i++) {
    PetscCall(PetscFindInt(i, nparts, parts, &loc));
    if (loc < 0) loc = -(loc + 1);
    else loc = loc + 1;
    indices[i - start] = loc;
  }
  PetscCall(PetscFree(parts));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Partitions a square n x n grid of N = n*n nodes into p x p square blocks, where p*p equals
   the number of parts and p must divide n.
*/
static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
{
  PetscInt    cell, n, N, p, rstart, rend, *color;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");

  PetscCall(MatGetSize(part->adj, &N, NULL));
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
  PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
  PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
  PetscCall(PetscMalloc1(rend - rstart, &color));
  /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
  for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :         2*p) : the start and end node of each subdomain
       seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
     levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough
*/
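/* For illustration, a small worked example (hand-traced from the code below): with p = 2 and
   sizes = {3, 4, 1} (subdomains of 3 and 4 nodes plus a root separator of 1 node, 8 nodes total)
   the routine returns seps = {0, 2, 3, 6, 7, 7} (subdomain 0 owns nodes 0..2, subdomain 1 owns
   nodes 3..6, the separator owns node 7) and level = {-1} (the root separator) */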
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt l2p, i, pTree, pStartTree;

  PetscFunctionBegin;
  if (!p) PetscFunctionReturn(PETSC_SUCCESS);
  /* the binary separator tree below requires p to be a power of 2; the early return above also
     keeps PetscLog2Real() away from p = 0 */
  l2p = (PetscInt)PetscLog2Real((PetscReal)p);
  PetscCheck(((PetscInt)1 << l2p) == p, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
  PetscCall(PetscArrayzero(seps, 2 * p - 2));
  PetscCall(PetscArrayzero(level, p - 1));
  seps[2 * p - 2] = sizes[2 * p - 2];
  pTree           = p;
  pStartTree      = 0;
  /* bottom-up pass: accumulate the subtree sizes up the separator tree */
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree / 2;
  }
  seps[2 * p - 2] -= sizes[2 * p - 2];

  pStartTree = 2 * p - 2;
  pTree      = 1;
  /* top-down pass: turn the accumulated sizes into starting offsets and record the level of each separator */
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2 * i - (pStartTree + 2 * pTree);
      PetscInt n = seps[k + 1];

      seps[k + 1]  = seps[i] - sizes[k + 1];
      seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
      level[i - p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
  /* expand each starting offset into an inclusive [start, end] pair */
  for (i = 2 * p - 2; i >= 0; i--) {
    seps[2 * i]     = seps[i];
    seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part", MyPartCreate);
.ve
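
   Here `MyPartCreate` is a user-supplied creation routine; it fills in the function table of the
   new partitioner in the same way as the built-in creation routines above. A minimal sketch, where
   `MyPartApply` is a user-provided routine that computes the partitioning `IS`:
.vb
   static PetscErrorCode MyPartCreate(MatPartitioning part)
   {
     PetscFunctionBegin;
     part->ops->apply   = MyPartApply;
     part->ops->view    = NULL;
     part->ops->destroy = NULL;
     PetscFunctionReturn(PETSC_SUCCESS);
   }
.ve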

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part, "my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningGetType - Gets the partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  PetscFunctionBegin;
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningSetNParts - Set how many partitions need to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

   Collective

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.   partitioning - the partitioning. For each local node, a non-negative value indicates the processor
                   number the node has been assigned to; a negative value x indicates that the node lies
                   on a separator at level -(x+1).

   Level: intermediate

   Note:
   The user can define additional partitionings; see `MatPartitioningRegister()`.
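
   For example, a value `v` in the returned index set can be decoded as follows (a sketch):
.vb
   if (v >= 0) owner = v;            /* the node is assigned to process v         */
   else        seplevel = -(v + 1);  /* the node lies on a separator at seplevel  */
.ve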

.seealso: [](ch_matrices), `Mat`, `MatPartitioningApply()`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

   Collective

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.   partitioning - the partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Options Database Keys:
+    -mat_partitioning_type <type> - set the partitioning package or algorithm to use
-    -mat_partitioning_view - display information about the partitioning object

   Level: beginner

   Note:
   The user can define additional partitionings; see `MatPartitioningRegister()`.
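
   Example Usage:
   A typical sequence, shown as a sketch; `A` is assumed to be an assembled `Mat` whose graph is to
   be partitioned, and the `PetscCall()` error checking used throughout PETSc is elided for brevity:
.vb
   MatPartitioning part;
   IS              is;

   MatPartitioningCreate(PETSC_COMM_WORLD, &part);
   MatPartitioningSetAdjacency(part, A);
   MatPartitioningSetFromOptions(part);
   MatPartitioningApply(part, &is);
   /* use the partitioning, for example with ISPartitioningToNumbering() */
   ISDestroy(&is);
   MatPartitioningDestroy(&part);
.ve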

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscFunctionBegin;
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
  improve = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
  PetscOptionsEnd();

  if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

  if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningImprove - Improves the quality of a given partition.

   Collective

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the original partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Options Database Key:
.   -mat_partitioning_improve - improve the quality of the given partition

   Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningViewImbalance - Display partitioning imbalance information.

   Collective

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the partitioning. For each local node this gives the processor
                   number that the node is assigned to.

   Options Database Key:
.    -mat_partitioning_view_imbalance - display the imbalance information of the partitioning

   Level: beginner

   Note:
   Subdomain sizes are weighted by the vertex weights when they are provided; the reported
   ratio R is the maximum subdomain size divided by the minimum subdomain size.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
@*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  nparts = matp->n;
  PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
  PetscCall(ISGetLocalSize(partitioning, &nlocal));
  PetscCall(ISGetIndices(partitioning, &indices));
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  PetscCall(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
  PetscCall(ISRestoreIndices(partitioning, &indices));
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
  PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
  PetscCall(MatPartitioningView(matp, viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix, this can be any `MatType` but the natural representation is `MATMPIADJ`

   Level: beginner
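
   Example Usage:
   A sketch of the natural usage, creating a `MATMPIADJ` matrix from CSR arrays `ia` and `ja`
   describing the local rows of the graph (see `MatCreateMPIAdj()` for the ownership rules of
   those arrays; `PetscCall()` elided):
.vb
   Mat adj;

   MatCreateMPIAdj(PETSC_COMM_WORLD, mlocal, N, ia, ja, NULL, &adj);
   MatPartitioningSetAdjacency(part, adj);
.ve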

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  PetscFunctionBegin;
  part->adj = adj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
  if (--((PetscObject)(*part))->refct > 0) {
    *part = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if ((*part)->ops->destroy) PetscCall((*(*part)->ops->destroy)((*part)));
  PetscCall(PetscFree((*part)->vertex_weights));
  PetscCall(PetscFree((*part)->part_weights));
  PetscCall(PetscHeaderDestroy(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()` or
             1 if that is not provided

   Level: beginner

   Notes:
   The array `weights` is freed by PETSc, so the user should not free it. In C/C++
   the array must be obtained with a call to `PetscMalloc()`, not malloc().

   The weights may not be used by some partitioners.
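
   For example, a sketch assigning one unit weight to each local row of the adjacency matrix
   `adj` (`PetscCall()` elided; PETSc takes ownership of the array and frees it):
.vb
   PetscInt *weights, i, m;

   MatGetLocalSize(adj, &m, NULL);
   PetscMalloc1(m, &weights);
   for (i = 0; i < m; i++) weights[i] = 1;
   MatPartitioningSetVertexWeights(part, weights);
.ve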

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->vertex_weights));
  part->vertex_weights = (PetscInt *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Note:
   The array `weights` is freed by PETSc, so the user should not free it. In C/C++
   the array must be obtained with a call to `PetscMalloc()`, not malloc().
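
   For example, a sketch requesting equally sized sub-domains (`PetscCall()` elided; PETSc takes
   ownership of the array and frees it):
.vb
   PetscReal *pweights;
   PetscInt   i;

   PetscMalloc1(nparts, &pweights);
   for (i = 0; i < nparts; i++) pweights[i] = 1.0 / nparts;
   MatPartitioningSetPartitionWeights(part, pweights);
.ve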

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->part_weights));
  part->part_weights = (PetscReal *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

   Logically Collective

   Input Parameters:
+  part - the partitioning context
-  use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
                      that is, use_edge_weights is set to `PETSC_FALSE`. If use_edge_weights is set to `PETSC_TRUE`,
                      the user must ensure that valid edge weights are stored in the adjacency matrix.

  Options Database Key:
.  -mat_partitioning_use_edge_weights - (true or false)

   Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
@*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningGetUseEdgeWeights - Get the flag that indicates whether or not edge weights are used.

   Logically Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  use_edge_weights - the flag indicating whether or not edge weights are used

   Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
          `MatPartitioningSetUseEdgeWeights()`
@*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective

   Input Parameter:
.   comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
          `MatPartitioningSetAdjacency()`
@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = NULL;

  PetscCall(MatInitializePackage());
  PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatOrderings", comm, MatPartitioningDestroy, MatPartitioningView));
  part->vertex_weights   = NULL;
  part->part_weights     = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  PetscCallMPI(MPI_Comm_size(comm, &size));
  part->n    = (PetscInt)size;
  part->ncon = 1;

  *newp = part;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningViewFromOptions - View a partitioning context from the options database

   Collective

   Input Parameters:
+  A - the partitioning context
.  obj - Optional object that provides the prefix used in the options database check
-  name - command line option

  Options Database Key:
.  -mat_partitioning_view [viewertype]:... - the viewer and its options

   Level: intermediate

  Note:
.vb
    If no value is provided ascii:stdout is used
       ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
                                                  for example ascii::ascii_info prints just the information about the object not all details
                                                  unless :append is given filename opens in write mode, overwriting what was already there
       binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
       draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex or draw:x
       socket[:port]                             defaults to the standard output port
       saws[:communicatorname]                   publishes object to the Scientific Application Webserver (SAWs)
.ve

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
-     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     `PetscViewerASCIIOpen()` - output to a specified file

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
  PetscCheckSameComm(part, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
    if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
  }
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscTryTypeMethod(part, view, viewer);
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective

   Input Parameters:
+  part - the partitioning context.
-  type - a known method

   Options Database Key:
.  -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods or see `MatPartitioningType`

   Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
{
  PetscBool match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscFunctionBegin;

  PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  /* destroy the old implementation and reset the function table before switching types */
  PetscTryTypeMethod(part, destroy);
  part->ops->destroy = NULL;

  part->setupcalled = 0;
  part->data        = NULL;
  PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

  PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

  PetscCall((*r)(part));

  PetscCall(PetscFree(((PetscObject)part)->type_name));
  PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database for the partitioning object

   Collective

   Input Parameter:
.  part - the partitioning context.

   Options Database Keys:
+  -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods
.  -mat_partitioning_nparts - number of subgraphs
-  -mat_partitioning_use_edge_weights - whether or not to use edge weights

   Level: beginner

   Note:
   If the partitioner has not been set by the user it uses one of the installed partitioners, such as
   ParMETIS. If there are no installed partitioners it does no repartitioning.
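
   For example, to select ParMETIS and request 8 parts at runtime (a sketch; this assumes PETSc
   was configured with ParMETIS):
$     -mat_partitioning_type parmetis -mat_partitioning_nparts 8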

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool   flag;
  char        type[256];
  const char *def;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
  if (flag) PetscCall(MatPartitioningSetType(part, type));

  PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

  PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

  PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

   Not Collective

   Input Parameters:
+  partitioning - the partitioning context
-  ncon - the number of weights

   Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  PetscFunctionBegin;
  partitioning->ncon = ncon;
  PetscFunctionReturn(PETSC_SUCCESS);
}