Actual source code: ex83.c
static char help[] = "Partition a tiny grid using hierarchical partitioning and increase the overlap using MatIncreaseOverlapSplit.\n\n";

/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A, B;
  PetscMPIInt     rank, size, membershipKey;
  PetscInt       *ia, *ja, *indices_sc, isrows_localsize;
  const PetscInt *indices;
  MatPartitioning part;
  IS              is, isrows, isrows_sc;
  IS              coarseparts, fineparts;
  MPI_Comm        comm, scomm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCheck(size == 4, comm, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Set up a small matrix: each rank supplies 4 consecutive rows of a 16-vertex
     adjacency graph in CSR form (ia = row offsets, ja = neighbor indices).
     membershipKey records which of the two sub-communicators the rank joins later. */
  PetscCall(PetscMalloc1(5, &ia));
  PetscCall(PetscMalloc1(16, &ja));
  if (rank == 0) { /* global rows 0-3 */
    ja[0] = 1; ja[1] = 4;            /* row 0 */
    ja[2] = 0; ja[3] = 2; ja[4] = 5; /* row 1 */
    ja[5] = 1; ja[6] = 3; ja[7] = 6; /* row 2 */
    ja[8] = 2; ja[9] = 7;            /* row 3 */
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 0;
  } else if (rank == 1) { /* global rows 4-7 */
    ja[0] = 0; ja[1] = 5; ja[2] = 8;              /* row 4 */
    ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9;   /* row 5 */
    ja[7] = 2; ja[8] = 5; ja[9] = 7; ja[10] = 10; /* row 6 */
    ja[11] = 3; ja[12] = 6; ja[13] = 11;          /* row 7 */
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 0;
  } else if (rank == 2) { /* global rows 8-11 */
    ja[0] = 4; ja[1] = 9; ja[2] = 12;              /* row 8 */
    ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13;  /* row 9 */
    ja[7] = 6; ja[8] = 9; ja[9] = 11; ja[10] = 14; /* row 10 */
    ja[11] = 7; ja[12] = 10; ja[13] = 15;          /* row 11 */
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 1;
  } else { /* rank 3: global rows 12-15 */
    ja[0] = 8; ja[1] = 13;              /* row 12 */
    ja[2] = 9; ja[3] = 12; ja[4] = 14;  /* row 13 */
    ja[5] = 10; ja[6] = 13; ja[7] = 15; /* row 14 */
    ja[8] = 11; ja[9] = 14;             /* row 15 */
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 1;
  }
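  /* Create the parallel adjacency matrix from the CSR data. MatCreateMPIAdj() takes
     ownership of ia and ja (they are freed when A is destroyed), which is why they
     are not freed explicitly below. */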
  PetscCall(MatCreateMPIAdj(comm, 4, 16, ia, ja, NULL, &A));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
  /*
    Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(comm, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
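  /* Hierarchical partitioning: the graph is first split into 2 coarse parts, and each
     coarse part is then split into 2 fine parts, giving 2 x 2 = 4 parts in total. */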
  PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
  PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, 2));
  PetscCall(MatPartitioningHierarchicalSetNfineparts(part, 2));
  PetscCall(MatPartitioningSetFromOptions(part));
  /* compute the new owning part (process) of each local vertex */
  PetscCall(MatPartitioningApply(part, &is));
  /* coarse parts */
  PetscCall(MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts));
  PetscCall(ISView(coarseparts, PETSC_VIEWER_STDOUT_WORLD));
  /* fine parts */
  PetscCall(MatPartitioningHierarchicalGetFineparts(part, &fineparts));
  PetscCall(ISView(fineparts, PETSC_VIEWER_STDOUT_WORLD));
  /* partitioning */
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  /* compute the global rows each process will own under the new partitioning */
  PetscCall(ISBuildTwoSided(is, NULL, &isrows));
  PetscCall(ISView(isrows, PETSC_VIEWER_STDOUT_WORLD));
  /* create a sub-communicator: ranks with the same membershipKey (0,1 and 2,3) end up together */
  PetscCallMPI(MPI_Comm_split(comm, membershipKey, rank, &scomm));
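  /* copy the local indices of isrows into a freshly allocated array; the copy is handed
     to a new IS on the sub-communicator below, so isrows itself can be destroyed first */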
  PetscCall(ISGetLocalSize(isrows, &isrows_localsize));
  PetscCall(PetscMalloc1(isrows_localsize, &indices_sc));
  PetscCall(ISGetIndices(isrows, &indices));
  PetscCall(PetscArraycpy(indices_sc, indices, isrows_localsize));
  PetscCall(ISRestoreIndices(isrows, &indices));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&coarseparts));
  PetscCall(ISDestroy(&fineparts));
  PetscCall(ISDestroy(&isrows));
  PetscCall(MatPartitioningDestroy(&part));
  /* create a sub-IS on the sub-communicator; PETSC_OWN_POINTER hands ownership of indices_sc to the IS */
  PetscCall(ISCreateGeneral(scomm, isrows_localsize, indices_sc, PETSC_OWN_POINTER, &isrows_sc));
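  /* convert the adjacency graph (MATMPIADJ) into a regular MPIAIJ matrix, the format
     MatIncreaseOverlapSplit() below operates on */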
  PetscCall(MatConvert(A, MATMPIAIJ, MAT_INITIAL_MATRIX, &B));
  PetscCall(MatView(B, PETSC_VIEWER_STDOUT_WORLD));
  /* increase the overlap of the sub-communicator index set by one layer of neighbors */
  PetscCall(MatIncreaseOverlapSplit(B, 1, &isrows_sc, 1));
  PetscCall(ISView(isrows_sc, NULL));
  PetscCall(ISDestroy(&isrows_sc));
  /*
    Free work space. All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}