Actual source code: ex24.c

static char help[]     = "Test that MatPartitioning and PetscPartitioner interfaces are equivalent when using PETSCPARTITIONERMATPARTITIONING\n\n";
static char FILENAME[] = "ex24.c";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>

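/* pull in PT-Scotch's own header so that SCOTCH_randomReset() can be called in
   ScotchResetRandomSeed() below; EXTERN_C_BEGIN/EXTERN_C_END keep C linkage when
   PETSc is compiled with a C++ compiler */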
#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
  #include <ptscotch.h>
EXTERN_C_END
#endif

typedef struct {
  PetscBool compare_is; /* Compare ISs and PetscSections */
  PetscBool compare_dm; /* Compare DMs */
  PetscBool tpw;        /* Use target partition weights */
  char      partitioning[64];
  char      repartitioning[64];
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool repartition = PETSC_TRUE;

  PetscFunctionBegin;
  options->compare_is = PETSC_FALSE;
  options->compare_dm = PETSC_FALSE;
  options->tpw        = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBool("-compare_is", "Compare ISs and PetscSections?", FILENAME, options->compare_is, &options->compare_is, NULL));
  PetscCall(PetscOptionsBool("-compare_dm", "Compare DMs?", FILENAME, options->compare_dm, &options->compare_dm, NULL));
  PetscCall(PetscStrncpy(options->partitioning, MATPARTITIONINGPARMETIS, sizeof(options->partitioning)));
  PetscCall(PetscOptionsString("-partitioning", "The mat partitioning type to test", "None", options->partitioning, options->partitioning, sizeof(options->partitioning), NULL));
  PetscCall(PetscOptionsBool("-repartition", "Partition again after the first partition?", FILENAME, repartition, &repartition, NULL));
  if (repartition) {
    PetscCall(PetscStrncpy(options->repartitioning, MATPARTITIONINGPARMETIS, sizeof(options->repartitioning)));
    PetscCall(PetscOptionsString("-repartitioning", "The mat partitioning type to test (second partitioning)", "None", options->repartitioning, options->repartitioning, sizeof(options->repartitioning), NULL));
  } else {
    options->repartitioning[0] = '\0';
  }
  PetscCall(PetscOptionsBool("-tpweight", "Use target partition weights", FILENAME, options->tpw, &options->tpw, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

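/* PT-Scotch partitioning is randomized; resetting its global seed before each
   partitioning call below keeps the two code paths comparable (no-op when PETSc
   is built without PT-Scotch) */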
static PetscErrorCode ScotchResetRandomSeed(void)
{
  PetscFunctionBegin;
#if defined(PETSC_HAVE_PTSCOTCH)
  SCOTCH_randomReset();
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscFunctionBegin;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

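/* Test flow: build two identical meshes; partition dm1 with a native
   PetscPartitioner and dm2 with PETSCPARTITIONERMATPARTITIONING wrapping a
   MatPartitioning of the same type; optionally compare the resulting ISs,
   PetscSections, and distributed DMs; optionally repartition the distributed
   meshes and compare again */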
int main(int argc, char **argv)
{
  MPI_Comm               comm;
  DM                     dm1, dm2, dmdist1, dmdist2;
  DMPlexInterpolatedFlag interp;
  MatPartitioning        mp;
  PetscPartitioner       part1, part2;
  AppCtx                 user;
  IS                     is1 = NULL, is2 = NULL;
  IS                     is1g, is2g;
  PetscSection           s1 = NULL, s2 = NULL, tpws = NULL;
  PetscInt               i;
  PetscBool              flg;
  PetscMPIInt            size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(ProcessOptions(comm, &user));
  PetscCall(CreateMesh(comm, &user, &dm1));
  PetscCall(CreateMesh(comm, &user, &dm2));
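  /* dm1 and dm2 are identical; dm1 is partitioned through the native
     PetscPartitioner interface, dm2 through the MatPartitioning wrapper */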

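  /* optional target partition weights: tpws has chart [0, size) and assigns each
     rank a target weight, i.e. its desired share of the mesh points */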
  if (user.tpw) {
    PetscBool isscotch;

    PetscCall(PetscSectionCreate(comm, &tpws));
    PetscCall(PetscSectionSetChart(tpws, 0, size));
    for (i = 0; i < size; i++) {
      PetscInt tdof = i % 2 ? 2 * i - 1 : i + 2;
      PetscCall(PetscSectionSetDof(tpws, i, tdof));
    }
    // PTScotch cannot have a zero partition weight
    PetscCall(PetscStrncmp(user.partitioning, PETSCPARTITIONERPTSCOTCH, sizeof(PETSCPARTITIONERPTSCOTCH), &isscotch));
    if (!isscotch && size > 1) { /* test zero tpw entry */
      PetscCall(PetscSectionSetDof(tpws, 0, 0));
    }
    PetscCall(PetscSectionSetUp(tpws));
  }

  /* partition dm1 using a native PetscPartitioner of the selected type (PETSCPARTITIONERPARMETIS by default) */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dm1, &part1));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1, "p1_"));
  PetscCall(PetscPartitionerSetType(part1, user.partitioning));
  PetscCall(PetscPartitionerSetFromOptions(part1));
  PetscCall(PetscSectionCreate(comm, &s1));
  PetscCall(PetscPartitionerDMPlexPartition(part1, dm1, tpws, s1, &is1));
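  /* s1 receives the number of points assigned to each rank; is1 lists those points, rank by rank */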

  /* partition dm2 using PETSCPARTITIONERMATPARTITIONING wrapping a MatPartitioning of the same selected type (MATPARTITIONINGPARMETIS by default) */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dm2, &part2));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2, "p2_"));
  PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
  PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
  PetscCall(MatPartitioningSetType(mp, user.partitioning));
  PetscCall(PetscPartitionerSetFromOptions(part2));
  PetscCall(PetscSectionCreate(comm, &s2));
  PetscCall(PetscPartitionerDMPlexPartition(part2, dm2, tpws, s2, &is2));

  PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
  PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
  PetscCall(ISViewFromOptions(is1g, NULL, "-seq_is1_view"));
  PetscCall(ISViewFromOptions(is2g, NULL, "-seq_is2_view"));
  /* compare the two ISs; with identical inputs the two interfaces should produce identical partitions */
  if (user.compare_is) {
    PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "ISs are not equal for type %s with size %d.\n", user.partitioning, size));
  }
  PetscCall(ISDestroy(&is1g));
  PetscCall(ISDestroy(&is2g));

  /* compare the two PetscSections */
  PetscCall(PetscSectionViewFromOptions(s1, NULL, "-seq_s1_view"));
  PetscCall(PetscSectionViewFromOptions(s2, NULL, "-seq_s2_view"));
  if (user.compare_is) {
    PetscCall(PetscSectionCompare(s1, s2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "PetscSections are not equal for type %s with size %d.\n", user.partitioning, size));
  }

  /* distribute both DMs, using the partitioners configured on each */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dm1, 0, NULL, &dmdist1));
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dm2, 0, NULL, &dmdist2));

  /* cleanup */
  PetscCall(PetscSectionDestroy(&tpws));
  PetscCall(PetscSectionDestroy(&s1));
  PetscCall(PetscSectionDestroy(&s2));
  PetscCall(ISDestroy(&is1));
  PetscCall(ISDestroy(&is2));
  PetscCall(DMDestroy(&dm1));
  PetscCall(DMDestroy(&dm2));

  /* if the distributed DMs are NULL (sequential case), then quit */
  if (!dmdist1 && !dmdist2) {
    PetscCall(PetscFinalize());
    return 0;
  }

  PetscCall(DMViewFromOptions(dmdist1, NULL, "-dm_dist1_view"));
  PetscCall(DMViewFromOptions(dmdist2, NULL, "-dm_dist2_view"));

  /* compare the two distributed DMs */
  if (user.compare_dm) {
    PetscCall(DMPlexEqual(dmdist1, dmdist2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Distributed DMs are not equal for type %s with size %d.\n", user.partitioning, size));
  }

  /* if repartitioning is disabled, clean up and quit */
  if (user.repartitioning[0] == '\0') {
    PetscCall(DMDestroy(&dmdist1));
    PetscCall(DMDestroy(&dmdist2));
    PetscCall(PetscFinalize());
    return 0;
  }

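  /* use a different target-weight distribution for the repartitioning step */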
  if (user.tpw) {
    PetscCall(PetscSectionCreate(comm, &tpws));
    PetscCall(PetscSectionSetChart(tpws, 0, size));
    for (i = 0; i < size; i++) {
      PetscInt tdof = i % 2 ? i + 1 : size - i;
      PetscCall(PetscSectionSetDof(tpws, i, tdof));
    }
    PetscCall(PetscSectionSetUp(tpws));
  }

  /* repartition distributed DM dmdist1 */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dmdist1, &part1));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1, "dp1_"));
  PetscCall(PetscPartitionerSetType(part1, user.repartitioning));
  PetscCall(PetscPartitionerSetFromOptions(part1));
  PetscCall(PetscSectionCreate(comm, &s1));
  PetscCall(PetscPartitionerDMPlexPartition(part1, dmdist1, tpws, s1, &is1));

  /* repartition distributed DM dmdist2 */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dmdist2, &part2));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2, "dp2_"));
  PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
  PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
  PetscCall(MatPartitioningSetType(mp, user.repartitioning));
  PetscCall(PetscPartitionerSetFromOptions(part2));
  PetscCall(PetscSectionCreate(comm, &s2));
  PetscCall(PetscPartitionerDMPlexPartition(part2, dmdist2, tpws, s2, &is2));

  /* compare the two ISs */
  PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
  PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
  PetscCall(ISViewFromOptions(is1g, NULL, "-dist_is1_view"));
  PetscCall(ISViewFromOptions(is2g, NULL, "-dist_is2_view"));
  if (user.compare_is) {
    PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Distributed ISs are not equal for type %s with size %d.\n", user.repartitioning, size));
  }
  PetscCall(ISDestroy(&is1g));
  PetscCall(ISDestroy(&is2g));

  /* compare the two PetscSections */
  PetscCall(PetscSectionViewFromOptions(s1, NULL, "-dist_s1_view"));
  PetscCall(PetscSectionViewFromOptions(s2, NULL, "-dist_s2_view"));
  if (user.compare_is) {
    PetscCall(PetscSectionCompare(s1, s2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Distributed PetscSections are not equal for type %s with size %d.\n", user.repartitioning, size));
  }

  /* redistribute both distributed DMs */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dmdist1, 0, NULL, &dm1));
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dmdist2, 0, NULL, &dm2));

  /* compare the two redistributed DMs; the check only applies to non-interpolated meshes */
  PetscCall(DMPlexIsInterpolated(dm1, &interp));
  if (interp == DMPLEX_INTERPOLATED_NONE) {
    PetscCall(DMPlexEqual(dm1, dm2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Redistributed DMs are not equal for type %s with size %d.\n", user.repartitioning, size));
  }

  /* cleanup */
  PetscCall(PetscSectionDestroy(&tpws));
  PetscCall(PetscSectionDestroy(&s1));
  PetscCall(PetscSectionDestroy(&s2));
  PetscCall(ISDestroy(&is1));
  PetscCall(ISDestroy(&is2));
  PetscCall(DMDestroy(&dm1));
  PetscCall(DMDestroy(&dm2));
  PetscCall(DMDestroy(&dmdist1));
  PetscCall(DMDestroy(&dmdist2));
  PetscCall(PetscFinalize());
  return 0;
}
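
/* Example manual invocation (illustrative; assumes a PETSc build with PT-Scotch
   and CTetGen, matching the suffix-3 configuration below):
     mpiexec -n 4 ./ex24 -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 \
       -partitioning ptscotch -repartitioning ptscotch -compare_is -compare_dm
*/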

/*TEST

  test:
    # partition sequential mesh loaded from Exodus file
    suffix: 0
    nsize: {{1 2 3 4 8}}
    requires: chaco parmetis ptscotch exodusii
    args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}
  test:
    # repartition mesh already partitioned naively by MED loader
    suffix: 1
    nsize: {{1 2 3 4 8}}
    TODO: MED
    requires: parmetis ptscotch med
    args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med
    args: -repartition 0 -partitioning {{parmetis ptscotch}}
  test:
    # partition mesh generated by ctetgen using scotch, then repartition with scotch, diff view
    suffix: 3
    nsize: 4
    requires: ptscotch ctetgen
    args: -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 -partitioning ptscotch -repartitioning ptscotch
    args: -p1_petscpartitioner_view -p2_petscpartitioner_view -dp1_petscpartitioner_view -dp2_petscpartitioner_view -tpweight {{0 1}}
  test:
    # partition mesh generated by ctetgen using partitioners supported both by MatPartitioning and PetscPartitioner
    suffix: 4
    nsize: {{1 2 3 4 8}}
    requires: chaco parmetis ptscotch ctetgen
    args: -dm_plex_dim 3 -dm_plex_box_faces {{2,3,4  5,4,3  7,11,5}} -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}

TEST*/