Actual source code: partptscotch.c
#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END
#endif

PetscBool  PTScotchPartitionerCite       = PETSC_FALSE;
const char PTScotchPartitionerCitation[] = "@article{PTSCOTCH,\n"
                                           "  author  = {C. Chevalier and F. Pellegrini},\n"
                                           "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
                                           "  journal = {Parallel Computing},\n"
                                           "  volume  = {34},\n"
                                           "  number  = {6},\n"
                                           "  pages   = {318--331},\n"
                                           "  year    = {2008},\n"
                                           "  doi     = {10.1016/j.parco.2007.12.001}\n"
                                           "}\n";

typedef struct {
  MPI_Comm  pcomm;     /* private communicator, duplicated from the partitioner's communicator at creation */
  PetscInt  strategy;  /* index into PTScotchStrategyList, translated by PTScotch_Strategy() */
  PetscReal imbalance; /* admissible load imbalance ratio passed to the Scotch strategy */
} PetscPartitioner_PTScotch;

#if defined(PETSC_HAVE_PTSCOTCH)

/* Check the return code of a Scotch/PT-Scotch call and raise a PETSc library error on failure */
#define PetscCallPTSCOTCH(...) \
  do { \
    PetscCheck(!(__VA_ARGS__), PETSC_COMM_SELF, PETSC_ERR_LIB, "Error calling PT-Scotch library"); \
  } while (0)

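/* Translate the PetscPartitioner strategy index (see PTScotchStrategyList) into the corresponding SCOTCH_STRAT* flag; unknown values fall back to SCOTCH_STRATDEFAULT */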
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case 0:
    return SCOTCH_STRATDEFAULT;
  case 1:
    return SCOTCH_STRATQUALITY;
  case 2:
    return SCOTCH_STRATSPEED;
  case 3:
    return SCOTCH_STRATBALANCE;
  case 4:
    return SCOTCH_STRATSAFETY;
  case 5:
    return SCOTCH_STRATSCALABILITY;
  case 6:
    return SCOTCH_STRATRECURSIVE;
  case 7:
    return SCOTCH_STRATREMAP;
  default:
    return SCOTCH_STRATDEFAULT;
  }
}

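/* Partition a graph that lives entirely on one process with sequential Scotch: build a SCOTCH_Graph from the CSR arrays, build a mapping strategy, and map the graph onto a complete target architecture of nparts (optionally weighted) vertices */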
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Arch  archdat;
  SCOTCH_Graph grafdat;
  SCOTCH_Strat stradat;
  SCOTCH_Num   vertnbr = n;
  SCOTCH_Num   edgenbr = xadj[n];
  SCOTCH_Num  *velotab = vtxwgt;
  SCOTCH_Num  *edlotab = adjwgt;
  SCOTCH_Num   flagval = strategy;
  double       kbalval = imbalance;

  PetscFunctionBegin;
  if (!n) PetscFunctionReturn(PETSC_SUCCESS);
  {
    PetscBool flg = PETSC_TRUE;

    /* Note: the misspelling "partititoner" is the historical name under which this deprecated option was registered */
    PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights"));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
    if (!flg) velotab = NULL;
  }
  PetscCallPTSCOTCH(SCOTCH_graphInit(&grafdat));
  PetscCallPTSCOTCH(SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab));
  PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
  PetscCallPTSCOTCH(SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval));
  PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
  if (tpart) { /* target partition weights */
    PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, PetscMin(nparts, n), tpart));
  } else {
    PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, PetscMin(nparts, n)));
  }
  PetscCallPTSCOTCH(SCOTCH_graphMap(&grafdat, &archdat, &stradat, part));
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(PETSC_SUCCESS);
}

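/* Partition a distributed graph with PT-Scotch: build a SCOTCH_Dgraph from the local CSR arrays, build a parallel mapping strategy, and compute a mapping onto a complete target architecture of nparts (optionally weighted) vertices */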
static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
{
  PetscMPIInt     procglbnbr;
  PetscMPIInt     proclocnum;
  SCOTCH_Arch     archdat;
  SCOTCH_Dgraph   grafdat;
  SCOTCH_Dmapping mappdat;
  SCOTCH_Strat    stradat;
  SCOTCH_Num      vertlocnbr;
  SCOTCH_Num      edgelocnbr;
  SCOTCH_Num     *veloloctab = vtxwgt;
  SCOTCH_Num     *edloloctab = adjwgt;
  SCOTCH_Num      flagval    = strategy;
  double          kbalval    = imbalance;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;

    /* Note: the misspelling "partititoner" is the historical name under which this deprecated option was registered */
    PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights"));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
    if (!flg) veloloctab = NULL;
  }
  PetscCallMPI(MPI_Comm_size(comm, &procglbnbr));
  PetscCallMPI(MPI_Comm_rank(comm, &proclocnum));
  vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
  edgelocnbr = xadj[vertlocnbr];

  PetscCallPTSCOTCH(SCOTCH_dgraphInit(&grafdat, comm));
  PetscCallPTSCOTCH(SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab));
  PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
  PetscCallPTSCOTCH(SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval));
  PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
  if (tpart) { /* target partition weights */
    PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
  } else {
    PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
  }
  PetscCallPTSCOTCH(SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part));
  PetscCallPTSCOTCH(SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat));
  SCOTCH_dgraphMapExit(&grafdat, &mappdat);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_dgraphExit(&grafdat);
  PetscFunctionReturn(PETSC_SUCCESS);
}

#endif /* PETSC_HAVE_PTSCOTCH */

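/* Strategy names accepted by -petscpartitioner_ptscotch_strategy, in the same order as the cases of PTScotch_Strategy() */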
static const char *const PTScotchStrategyList[] = {"DEFAULT", "QUALITY", "SPEED", "BALANCE", "SAFETY", "SCALABILITY", "RECURSIVE", "REMAP"};

static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free(&p->pcomm));
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double)p->imbalance));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_PTScotch_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_PTScotch *p     = (PetscPartitioner_PTScotch *)part->data;
  const char *const         *slist = PTScotchStrategyList;
  PetscInt                   nlist = PETSC_STATIC_ARRAY_LENGTH(PTScotchStrategyList);
  PetscBool                  flag;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner PTScotch Options");
  PetscCall(PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag));
  PetscCall(PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

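/* ops->partition implementation: gather the global vertex distribution, dispatch to sequential Scotch (graph on one rank) or parallel PT-Scotch, then convert the per-vertex assignment into a PetscSection of partition sizes and an IS of vertex numbers */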
static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  MPI_Comm    comm;
  PetscInt    nvtxs  = numVertices; /* The number of vertices in full graph */
  PetscInt   *vtxdist;              /* Distribution of vertices across processes */
  PetscInt   *xadj   = start;       /* Start of edge list for each vertex */
  PetscInt   *adjncy = adjacency;   /* Edge lists for all vertices */
  PetscInt   *vwgt   = NULL;        /* Vertex weights */
  PetscInt   *adjwgt = NULL;        /* Edge weights */
  PetscInt    v, i, *assignment, *points;
  PetscMPIInt size, rank, p;
  PetscBool   hasempty = PETSC_FALSE;
  PetscInt   *tpwgts   = NULL;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(PetscMalloc2(size + 1, &vtxdist, PetscMax(nvtxs, 1), &assignment));
  /* Calculate vertex distribution */
  vtxdist[0] = 0;
  PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p - 1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscCall(PetscFree2(vtxdist, assignment));
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* Calculate vertex weights */
  if (vertSection) {
    PetscCall(PetscMalloc1(nvtxs, &vwgt));
    for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
  }

  /* Calculate partition weights */
  if (targetSection) {
    PetscInt sumw;

    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (p = 0, sumw = 0; p < nparts; ++p) {
      PetscCall(PetscSectionGetDof(targetSection, p, &tpwgts[p]));
      sumw += tpwgts[p];
    }
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }

  {
    PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *)part->data;
    int                        strat = PTScotch_Strategy(pts->strategy);
    double                     imbal = (double)pts->imbalance;

    /* Find the first rank that owns vertices; if it owns all of them, partition sequentially on that rank */
    for (p = 0; !vtxdist[p + 1] && p < size; ++p)
      ;
    if (vtxdist[p + 1] == vtxdist[size]) {
      if (rank == p) PetscCall(PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment));
    } else {
      MPI_Comm pcomm = pts->pcomm;

      if (hasempty) {
        PetscInt cnt;

        /* Exclude empty ranks and compact the vertex distribution to match the reduced communicator */
        PetscCallMPI(MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm));
        for (p = 0, cnt = 0; p < size; p++) {
          if (vtxdist[p + 1] != vtxdist[p]) {
            vtxdist[cnt + 1] = vtxdist[p + 1];
            cnt++;
          }
        }
      }
      if (nvtxs) PetscCall(PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm));
      if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
    }
  }
  PetscCall(PetscFree(vwgt));
  PetscCall(PetscFree(tpwgts));

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));

  PetscCall(PetscFree2(vtxdist, assignment));
  PetscFunctionReturn(PETSC_SUCCESS);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
#endif
}

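/* Install the PT-Scotch implementations in the PetscPartitioner function table */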
static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_PTScotch;
  part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
  part->ops->partition      = PetscPartitionerPartition_PTScotch;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

  Level: intermediate

  Options Database Keys:
+ -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default, quality, speed, balance, safety, scalability, recursive, or remap
- -petscpartitioner_ptscotch_imbalance <val>   - Load imbalance ratio

  Note:
  When the graph resides on a single process, this partitioner uses sequential Scotch rather than PT-Scotch

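  Example Usage:
  A minimal sketch of typical use (`comm` here stands for any MPI communicator):
.vb
  PetscPartitioner part;

  PetscCall(PetscPartitionerCreate(comm, &part));
  PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH));
  PetscCall(PetscPartitionerSetFromOptions(part)); /* picks up the options listed above */
  /* ... use the partitioner, e.g. through PetscPartitionerPartition() ... */
  PetscCall(PetscPartitionerDestroy(&part));
.ve
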
.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscCall(PetscNew(&p));
  part->data = p;

  PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
  p->strategy  = 0;
  p->imbalance = 0.01;

  PetscCall(PetscPartitionerInitialize_PTScotch(part));
  PetscCall(PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite));
  PetscFunctionReturn(PETSC_SUCCESS);
}