Actual source code: partsimple.c
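/* A trivial mesh partitioner: it ignores the cell adjacency graph and assigns contiguous
   blocks of cells to processes in their original order. Optionally the blocks can follow a
   Cartesian node/process grid layout (see PetscPartitionerSetFromOptions_Simple below). */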
#include <petscvec.h>
#include <petsc/private/partitionerimpl.h>

typedef struct {
  PetscBool useGrid;        /* Flag to use a grid layout */
  PetscInt  gridDim;        /* The grid dimension */
  PetscInt  nodeGrid[3];    /* Dimension of node grid */
  PetscInt  processGrid[3]; /* Dimension of local process grid on each node */
} PetscPartitioner_Simple;
static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
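/* The simple partitioner carries no state worth printing, so ASCII viewing is a no-op */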
static PetscErrorCode PetscPartitionerView_Simple_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_Simple_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
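/* Two option arrays control the optional grid layout: -petscpartitioner_simple_node_grid gives
   the number of nodes in each dimension, and -petscpartitioner_simple_process_grid gives the
   number of local processes per node in each dimension. Setting either turns on grid
   partitioning, and when both are given their dimensions must agree. */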
static PetscErrorCode PetscPartitionerSetFromOptions_Simple(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  PetscInt                 num, i;
  PetscBool                flg;

  PetscFunctionBegin;
  for (i = 0; i < 3; ++i) p->processGrid[i] = p->nodeGrid[i] = 1;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner Simple Options");
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_node_grid", "Number of nodes in each dimension", "", p->nodeGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    p->gridDim = num;
  }
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_process_grid", "Number of local processes in each dimension for a given node", "", p->processGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    if (p->gridDim < 0) p->gridDim = num;
    else PetscCheck(p->gridDim == num, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_INCOMP, "Process grid dimension %" PetscInt_FMT " != %" PetscInt_FMT " node grid dimension", num, p->gridDim);
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
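/* Grid partition: processes form a (node grid) x (per-node process grid) lattice, and cells,
   assumed to be numbered lexicographically on a matching Cartesian grid, are dealt out in
   contiguous bricks. For example (illustrative only), with -petscpartitioner_simple_node_grid 2
   -petscpartitioner_simple_process_grid 2 and 8 cells on 4 processes, parts 0..3 receive cells
   {0,1}, {2,3}, {4,5}, {6,7}. */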
static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p     = (PetscPartitioner_Simple *)part->data;
  const PetscInt          *nodes = p->nodeGrid;
  const PetscInt          *procs = p->processGrid;
  PetscInt                *cellproc, *offsets, cells[3] = {1, 1, 1}, pcells[3] = {1, 1, 1};
  PetscInt                 Np = 1, Nr, np, nk, nj, ni, pk, pj, pi, ck, cj, ci, i;
  MPI_Comm                 comm;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights when using grid partition\n"));
  if (targetSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores partition weights when using grid partition\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Check grid */
  for (i = 0; i < 3; ++i) Np *= nodes[i] * procs[i];
  PetscCheck(nparts == Np, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %" PetscInt_FMT " grid size", nparts, Np);
  PetscCheck(nparts == size, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %d processes", nparts, size);
  PetscCheck(numVertices % nparts == 0, comm, PETSC_ERR_ARG_INCOMP, "Number of cells %" PetscInt_FMT " is not divisible by number of partitions %" PetscInt_FMT, numVertices, nparts);
  for (i = 0; i < p->gridDim; ++i) cells[i] = nodes[i] * procs[i];
  Nr = numVertices / nparts;
  while (Nr > 1) {
    for (i = 0; i < p->gridDim; ++i) {
      cells[i] *= 2;
      Nr /= 2;
    }
  }
  PetscCheck(!numVertices || Nr == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Odd number of cells %" PetscInt_FMT ". Must be nprocs*2^k", numVertices);
  for (i = 0; i < p->gridDim; ++i) {
    PetscCheck(cells[i] % (nodes[i] * procs[i]) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "dir %" PetscInt_FMT ": number of cells (%" PetscInt_FMT ") is not divisible by the number of processes (%" PetscInt_FMT ")", i, cells[i], nodes[i] * procs[i]);
    pcells[i] = cells[i] / (nodes[i] * procs[i]);
  }
  /* Compute sizes */
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts));
  PetscCall(PetscSectionSetUp(partSection));
  PetscCall(PetscCalloc1(nparts, &offsets));
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionGetOffset(partSection, np, &offsets[np]));
  if (!numVertices) pcells[0] = pcells[1] = pcells[2] = 0;
  /* Compute partition */
  PetscCall(PetscMalloc1(numVertices, &cellproc));
  for (nk = 0; nk < nodes[2]; ++nk) {
    for (nj = 0; nj < nodes[1]; ++nj) {
      for (ni = 0; ni < nodes[0]; ++ni) {
        const PetscInt nid = (nk * nodes[1] + nj) * nodes[0] + ni;

        for (pk = 0; pk < procs[2]; ++pk) {
          for (pj = 0; pj < procs[1]; ++pj) {
            for (pi = 0; pi < procs[0]; ++pi) {
              const PetscInt pid = ((nid * procs[2] + pk) * procs[1] + pj) * procs[0] + pi;

              /* Assume that cells are originally numbered lexicographically */
              for (ck = 0; ck < pcells[2]; ++ck) {
                for (cj = 0; cj < pcells[1]; ++cj) {
                  for (ci = 0; ci < pcells[0]; ++ci) {
                    const PetscInt cid = (((nk * procs[2] + pk) * pcells[2] + ck) * cells[1] + ((nj * procs[1] + pj) * pcells[1] + cj)) * cells[0] + (ni * procs[0] + pi) * pcells[0] + ci;

                    cellproc[offsets[pid]++] = cid;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  for (np = 1; np < nparts; ++np) PetscCheck(offsets[np] - offsets[np - 1] == numVertices / nparts, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Offset %" PetscInt_FMT " != %" PetscInt_FMT " partition size", offsets[np], numVertices / nparts);
  PetscCall(PetscFree(offsets));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numVertices, cellproc, PETSC_OWN_POINTER, partition));
  PetscFunctionReturn(PETSC_SUCCESS);
}
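/* Default (non-grid) partition: cells keep their original order and are split into nparts
   contiguous chunks of nearly equal size, or of sizes proportional to the target weights in
   targetSection when those are given. The adjacency graph is never consulted. */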
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  MPI_Comm                 comm;
  PetscInt                 np, *tpwgts = NULL, sumw = 0, numVerticesGlobal = 0;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (p->useGrid) {
    PetscCall(PetscPartitionerPartition_Simple_Grid(part, nparts, numVertices, start, adjacency, vertSection, targetSection, partSection, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
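  /* Scale the target weights so they sum to the global number of vertices, then put any
     rounding remainder on the part with the largest weight so the total is preserved */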
  if (targetSection) {
    PetscCall(MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (np = 0; np < nparts; ++np) {
      PetscCall(PetscSectionGetDof(targetSection, np, &tpwgts[np]));
      sumw += tpwgts[np];
    }
    if (sumw) {
      PetscInt m, mp;

      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np] * numVerticesGlobal) / sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) {
          m  = tpwgts[np];
          mp = np;
        }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }
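  /* Locally the partition is the identity: each process lists its own cells in their original
     order, and only the part sizes recorded in partSection change */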
  PetscCall(ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition));
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, tpwgts[np]));
    } else {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts + ((numVertices % nparts) > np)));
    }
  } else {
    if (tpwgts) {
      Vec          v;
      PetscScalar *array;
      PetscInt     st, j;
      PetscMPIInt  rank;

      PetscCall(VecCreate(comm, &v));
      PetscCall(VecSetSizes(v, numVertices, numVerticesGlobal));
      PetscCall(VecSetType(v, VECSTANDARD));
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      for (np = 0, st = 0; np < nparts; ++np) {
        if (rank == np || (rank == size - 1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) PetscCall(VecSetValue(v, st + j, np, INSERT_VALUES));
        }
        st += tpwgts[np];
      }
      PetscCall(VecAssemblyBegin(v));
      PetscCall(VecAssemblyEnd(v));
      PetscCall(VecGetArray(v, &array));
      for (j = 0; j < numVertices; ++j) PetscCall(PetscSectionAddDof(partSection, (PetscInt)PetscRealPart(array[j]), 1));
      PetscCall(VecRestoreArray(v, &array));
      PetscCall(VecDestroy(&v));
    } else {
      PetscMPIInt rank;
      PetscInt    nvGlobal, *offsets, myFirst, myLast;

      PetscCall(PetscMalloc1(size + 1, &offsets));
      offsets[0] = 0;
      PetscCallMPI(MPI_Allgather(&numVertices, 1, MPIU_INT, &offsets[1], 1, MPIU_INT, comm));
      for (np = 2; np <= size; np++) offsets[np] += offsets[np - 1];
      nvGlobal = offsets[size];
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      PetscCall(PetscFree(offsets));
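      /* The global cells split into nparts chunks: the first rem = nvGlobal % nparts chunks
         hold pBig = nvGlobal/nparts + 1 cells, the rest hold pSmall = nvGlobal/nparts. Each
         rank finds the chunks overlapping its owned range [myFirst, myLast] and records the
         size of each overlap */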
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem    = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal / nparts;
        PetscInt pBig   = nvGlobal / nparts + 1;

        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal / nparts);
          lastPart  = myLast / (nvGlobal / nparts);
        }

        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart = np * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np);
          PetscInt PartEnd   = (np + 1) * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np + 1);

          PartStart = PetscMax(PartStart, myFirst);
          PartEnd   = PetscMin(PartEnd, myLast + 1);
          PetscCall(PetscSectionSetDof(partSection, np, PartEnd - PartStart));
        }
      }
    }
  }
  PetscCall(PetscFree(tpwgts));
  PetscFunctionReturn(PETSC_SUCCESS);
}
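/* The simple partitioner never inspects the adjacency graph, so noGraph is set and callers
   may pass empty start/adjacency arrays */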
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE;
  part->ops->view           = PetscPartitionerView_Simple;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Simple;
  part->ops->destroy        = PetscPartitionerDestroy_Simple;
  part->ops->partition      = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns contiguous blocks of cells to each process

  Options Database Keys:
+ -petscpartitioner_simple_node_grid <m,n,p>    - Number of nodes in each dimension
- -petscpartitioner_simple_process_grid <m,n,p> - Number of local processes in each dimension for a given node

  Level: intermediate

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;

  PetscFunctionBegin;
  PetscCall(PetscNew(&p));
  p->gridDim = -1;
  part->data = p;

  PetscCall(PetscPartitionerInitialize_Simple(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}
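/* Usage sketch (assumed caller code, not part of this file): select the simple partitioner on
   a DMPlex before distributing it; `dm` is a hypothetical serial mesh.

     PetscPartitioner part;
     DM               dmDist = NULL;

     PetscCall(DMPlexGetPartitioner(dm, &part));
     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
     PetscCall(PetscPartitionerSetFromOptions(part));
     PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));

   Equivalently, pass -petscpartitioner_type simple on the command line. */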