Actual source code: ex18.c

static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";

#include <petsc/private/dmpleximpl.h>
/* List of test meshes

Network
-------
Test 0 (2 ranks):

network=0:
---------
  cell 0   cell 1   cell 2                  nCells-1    (edge)
0 ------ 1 ------ 2 ------ 3 -- -- v -- -- nCells       (vertex)

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3 ... nCells
  cell(edge) distribution:
    rank 0: 0 1
    rank 1: 2 ... nCells-1

network=1:
---------
               v2
                ^
                |
              cell 2
                |
v0 --cell 0--> v3 --cell 1--> v1

  vertex distribution:
    rank 0: 0 1 3
    rank 1: 2
  cell(edge) distribution:
    rank 0: 0 1
    rank 1: 2

  example:
    mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50

Triangle
--------
Test 0 (2 ranks):
Two triangles sharing a face

        2
      / | \
     /  |  \
    /   |   \
   0  0 | 1  3
    \   |   /
     \  |  /
      \ | /
        1

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3
  cell distribution:
    rank 0: 0
    rank 1: 1
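
  example (hypothetical run line, added for illustration by analogy with the network
  example above; every option used is defined in ProcessOptions() below):
    mpiexec -n 2 ./ex18 -dim 2 -testnum 0 -interpolate after_create -distribute 1 -dist_dm_view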

Test 1 (3 ranks):
Four triangles partitioned across 3 ranks

   0 _______ 3
   | \     / |
   |  \ 1 /  |
   |   \ /   |
   | 0  2  2 |
   |   / \   |
   |  / 3 \  |
   | /     \ |
   1 ------- 4

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3
    rank 2: 4
  cell distribution:
    rank 0: 0
    rank 1: 1
    rank 2: 2 3

Test 2 (3 ranks):
Four triangles partitioned across 3 ranks

   1 _______ 3
   | \     / |
   |  \ 1 /  |
   |   \ /   |
   | 0  0  2 |
   |   / \   |
   |  / 3 \  |
   | /     \ |
   2 ------- 4

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3
    rank 2: 4
  cell distribution:
    rank 0: 0
    rank 1: 1
    rank 2: 2 3

Tetrahedron
-----------
Test 0:
Two tets sharing a face

 cell   3 _______    cell
 0    / | \      \   1
     /  |  \      \
    /   |   \      \
   0----|----4-----2
    \   |   /      /
     \  |  /      /
      \ | /      /
        1-------
   y
   | x
   |/
   *----z

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3 4
  cell distribution:
    rank 0: 0
    rank 1: 1

Quadrilateral
-------------
Test 0 (2 ranks):
Two quads sharing a face

   3-------2-------5
   |       |       |
   |   0   |   1   |
   |       |       |
   0-------1-------4

  vertex distribution:
    rank 0: 0 1 2
    rank 1: 3 4 5
  cell distribution:
    rank 0: 0
    rank 1: 1

TODO Test 1:
A quad and a triangle sharing a face

   5-------4
   |       | \
   |   0   |  \
   |       | 1 \
   2-------3----6

Hexahedron
----------
Test 0 (2 ranks):
Two hexes sharing a face

cell   7-------------6-------------11  cell
0     /|            /|            /|   1
     / |   F1      / |   F7      / |
    /  |          /  |          /  |
   4-------------5-------------10  |
   |   |     F4  |   |     F10 |   |
   |   |         |   |         |   |
   |F5 |         |F3 |         |F9 |
   |   |  F2     |   |   F8    |   |
   |   3---------|---2---------|---9
   |  /          |  /          |  /
   | /   F0      | /    F6     | /
   |/            |/            |/
   0-------------1-------------8

  vertex distribution:
    rank 0: 0 1 2 3 4 5
    rank 1: 6 7 8 9 10 11
  cell distribution:
    rank 0: 0
    rank 1: 1

*/

typedef enum {
  NONE,
  CREATE,
  AFTER_CREATE,
  AFTER_DISTRIBUTE
} InterpType;
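/* InterpType values map one-to-one to the -interpolate option strings
   {none, create, after_create, after_distribute} parsed in ProcessOptions() below */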

typedef struct {
  PetscInt    debug;        /* The debugging level */
  PetscInt    testNum;      /* Indicates the mesh to create */
  PetscInt    dim;          /* The topological mesh dimension */
  PetscBool   cellSimplex;  /* Use simplices or hexes */
  PetscBool   distribute;   /* Distribute the mesh */
  InterpType  interpolate;  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool   useGenerator; /* Construct mesh with a mesh generator */
  PetscBool   testOrientIF; /* Test for different original interface orientations */
  PetscBool   testHeavy;    /* Run the heavy PointSF test */
  PetscBool   customView;   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt    ornt[2];      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt    faces[3];     /* Number of faces per dimension for generator */
  PetscScalar coords[128];
  PetscReal   coordsTol;
  PetscInt    ncoords;
  PetscInt    pointsToExpand[128];
  PetscInt    nPointsToExpand;
  PetscBool   testExpandPointsEmpty;
  char        filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;

struct _n_PortableBoundary {
  Vec           coordinates;
  PetscInt      depth;
  PetscSection *sections;
};
typedef struct _n_PortableBoundary *PortableBoundary;
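/* A PortableBoundary stores the whole-domain boundary as vertex coordinates plus one
   cone section per depth level, i.e. a description independent of partitioning and
   point numbering; see DMPlexCheckPointSFHeavy() below for how it is used */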

#if defined(PETSC_USE_LOG)
static PetscLogStage stage[3];
#endif

static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);

static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
{
  PetscInt d;

  PetscFunctionBegin;
  if (!*bnd) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(VecDestroy(&(*bnd)->coordinates));
  for (d = 0; d < (*bnd)->depth; d++) PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
  PetscCall(PetscFree((*bnd)->sections));
  PetscCall(PetscFree(*bnd));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
  PetscInt    interp = NONE, dim;
  PetscBool   flg1, flg2;

  PetscFunctionBegin;
  options->debug                 = 0;
  options->testNum               = 0;
  options->dim                   = 2;
  options->cellSimplex           = PETSC_TRUE;
  options->distribute            = PETSC_FALSE;
  options->interpolate           = NONE;
  options->useGenerator          = PETSC_FALSE;
  options->testOrientIF          = PETSC_FALSE;
  options->testHeavy             = PETSC_TRUE;
  options->customView            = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]               = 0;
  options->ornt[1]               = 0;
  options->faces[0]              = 2;
  options->faces[1]              = 2;
  options->faces[2]              = 2;
  options->filename[0]           = '\0';
  options->coordsTol             = PETSC_DEFAULT;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0));
  PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0));
  PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType)interp;
  PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE, comm, PETSC_ERR_SUP, "-interpolate after_distribute needs -distribute 1");
  PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  options->ncoords = 128;
  PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  options->nPointsToExpand = 128;
  PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG points using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
  PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3));
  dim = 3;
  PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    PetscCheck(!flg1 || dim == options->dim, comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0));
  PetscCheck(flg2 == options->testOrientIF, comm, PETSC_ERR_ARG_OUTOFRANGE, "-rotate_interface_0 and -rotate_interface_1 must be set either both or neither");
  if (options->testOrientIF) {
    PetscInt i;
    for (i = 0; i < 2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
    }
    options->filename[0]  = '\0';
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum;
  PetscMPIInt rank, size;
  PetscInt    numCorners = 2, i;
  PetscInt    numCells, numVertices, network;
  PetscInt   *cells;
  PetscReal  *coords;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCheck(size <= 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes", testNum);

  numCells = 3;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
  PetscCheck(numCells >= 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must be >= 3", numCells);

  if (size == 1) {
    numVertices = numCells + 1;
    PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords));
    for (i = 0; i < numCells; i++) {
      cells[2 * i]      = i;
      cells[2 * i + 1]  = i + 1;
      coords[2 * i]     = i;
      coords[2 * i + 1] = i + 1;
    }

    PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
    PetscCall(PetscFree2(cells, coords));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  network = 0;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
  if (network == 0) {
    switch (rank) {
    case 0: {
      numCells    = 2;
      numVertices = numCells;
      PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
      cells[0]  = 0;
      cells[1]  = 1;
      cells[2]  = 1;
      cells[3]  = 2;
      coords[0] = 0.;
      coords[1] = 1.;
      coords[2] = 1.;
      coords[3] = 2.;
    } break;
    case 1: {
      numCells -= 2;
      numVertices = numCells + 1;
      PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
      for (i = 0; i < numCells; i++) {
        cells[2 * i]      = 2 + i;
        cells[2 * i + 1]  = 2 + i + 1;
        coords[2 * i]     = 2 + i;
        coords[2 * i + 1] = 2 + i + 1;
      }
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  } else { /* network_case = 1 */
    /* ----------------------- */
    switch (rank) {
    case 0: {
      numCells    = 2;
      numVertices = 3;
      PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
      cells[0] = 0;
      cells[1] = 3;
      cells[2] = 3;
      cells[3] = 1;
      /* 1D coordinates for vertices 0, 1, 3; values are arbitrary for this topological
         test, set here only to avoid passing uninitialized memory */
      coords[0] = 0.;
      coords[1] = 2.;
      coords[2] = 1.;
    } break;
    case 1: {
      numCells    = 1;
      numVertices = 1;
      PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
      cells[0] = 3;
      cells[1] = 2;
      /* 1D coordinate for vertex 2 (arbitrary, see above) */
      coords[0] = 3.;
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  }
  PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
  PetscCall(PetscFree2(cells, coords));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {-0.5, 0.5, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 3, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.5, 0.5};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 2, 3};
      PetscReal      coords[4]       = {0.5, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {2, 4, 3, 2, 1, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 2, 0};
      PetscReal      coords[4]       = {0.5, 0.5, 0.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 0, 3};
      PetscReal      coords[4]       = {0.0, 0.0, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {0, 4, 3, 0, 2, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
      const PetscInt cells[4]        = {0, 2, 1, 3};
      PetscReal      coords[6]       = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]        = {1, 2, 4, 3};
      PetscReal      coords[9]       = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  if (user->testOrientIF) {
    PetscInt ifp[] = {8, 6};

    PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh before orientation"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    PetscCall(DMPlexCheckFaces(*dm, 0));
    PetscCall(DMPlexOrientInterface_Internal(*dm));
    PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]            = {0, 1, 2, 3};
      PetscReal      coords[6]           = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
      PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]            = {1, 4, 5, 2};
      PetscReal      coords[6]           = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
      PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6 * 3]       = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6 * 3]       = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CustomView(DM dm, PetscViewer v)
{
  DMPlexInterpolatedFlag interpolated;
  PetscBool              distributed;

  PetscFunctionBegin;
  PetscCall(DMPlexIsDistributed(dm, &distributed));
  PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
  PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
  PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char *filename     = user->filename;
  PetscBool   testHeavy    = user->testHeavy;
  PetscBool   interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool   distributed  = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  PetscCall(PetscLogStagePush(stage[0]));
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscCall(PetscLogStagePop());
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  PetscCall(DMPlexIsDistributed(*dm, &distributed));
  PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheck(!distributed, comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  PetscCall(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscPartitioner part;
  PortableBoundary boundary       = NULL;
  DM               serialDM       = NULL;
  PetscBool        cellSimplex    = user->cellSimplex;
  PetscBool        useGenerator   = user->useGenerator;
  PetscBool        interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        testHeavy      = user->testHeavy;
  PetscMPIInt      rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (user->filename[0]) {
    PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
  } else if (useGenerator) {
    PetscCall(PetscLogStagePush(stage[0]));
    PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
    PetscCall(PetscLogStagePop());
  } else {
    PetscCall(PetscLogStagePush(stage[0]));
    switch (user->dim) {
    case 1:
      PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
      break;
    case 2:
      if (cellSimplex) {
        PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
      }
      break;
    case 3:
      if (cellSimplex) {
        PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
      }
      break;
    default:
      SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
    }
    PetscCall(PetscLogStagePop());
  }
  PetscCheck(user->ncoords % user->dim == 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisible by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
  PetscCall(PetscObjectSetName((PetscObject)*dm, "Original Mesh"));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

  if (interpSerial) {
    DM idm;

    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
    PetscCall(PetscLogStagePush(stage[2]));
    PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
    PetscCall(PetscLogStagePop());
    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
    PetscCall(DMDestroy(dm));
    *dm = idm;
    PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
  }

  /* Set partitioner options */
  PetscCall(DMPlexGetPartitioner(*dm, &part));
  if (part) {
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
  }

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  if (testHeavy) {
    PetscBool distributed;

    PetscCall(DMPlexIsDistributed(*dm, &distributed));
    if (!serialDM && !distributed) {
      serialDM = *dm;
      PetscCall(PetscObjectReference((PetscObject)*dm));
    }
    if (serialDM) PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
    if (boundary) {
      /* check DM which has been created in parallel and already interpolated */
      PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }
  if (user->distribute) {
    DM pdm = NULL;

    /* Redistribute mesh over processes using that partitioner */
    PetscCall(PetscLogStagePush(stage[1]));
    PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
    PetscCall(PetscLogStagePop());
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm = pdm;
      PetscCall(PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
    }

    if (interpParallel) {
      DM idm;

      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
      PetscCall(PetscLogStagePush(stage[2]));
      PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
      PetscCall(PetscLogStagePop());
      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
      PetscCall(DMDestroy(dm));
      *dm = idm;
      PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
    }
  }
  if (testHeavy) {
    if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }

  PetscCall(PetscObjectSetName((PetscObject)*dm, "Parallel Mesh"));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  PetscCall(DMDestroy(&serialDM));
  PetscCall(PortableBoundaryDestroy(&boundary));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#define ps2d(number) ((double)PetscRealPart(number))
static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
{
  PetscFunctionBegin;
  PetscCheck(dim <= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal to 3");
  if (tol >= 1e-3) {
    switch (dim) {
    case 1:
      PetscCall(PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0])));
      break; /* break after each case; falling through would overwrite buf and read past coords[] */
    case 2:
      PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1])));
      break;
    case 3:
      PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
      break;
    }
  } else {
    switch (dim) {
    case 1:
      PetscCall(PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0])));
      break;
    case 2:
      PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1])));
      break;
    case 3:
      PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
      break;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
{
  PetscInt           dim, i, npoints;
  IS                 pointsIS;
  const PetscInt    *points;
  const PetscScalar *coords;
  char               coordstr[128];
  MPI_Comm           comm;
  PetscMPIInt        rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(VecGetArrayRead(coordsVec, &coords));
  for (i = 0; i < npoints; i++) {
    PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol));
    if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscCall(VecRestoreArrayRead(coordsVec, &coords));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS            is;
  PetscSection *sects;
  IS           *iss;
  PetscInt      d, depth;
  PetscMPIInt   rank;
  PetscViewer   viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  if (user->testExpandPointsEmpty && rank == 0) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank));
  for (d = depth - 1; d >= 0; d--) {
    IS        checkIS;
    PetscBool flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth - 1) {
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d + 1], &flg));
      PetscCheck(flg, PetscObjectComm((PetscObject)checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt        n, n1, ncone, numCoveredPoints, o, p, q, start, end;
  const PetscInt *coveredPoints;
  const PetscInt *arr, *cone;
  PetscInt       *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  PetscCheck(n == n1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end - start, &newarr));
  for (q = start; q < end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      numCoveredPoints = 1;
      p                = cone[0];
    } else {
      PetscInt i;

      p = PETSC_MAX_INT;
      for (i = 0; i < ncone; i++)
        if (cone[i] < 0) {
          p = -1;
          break;
        }
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheck(numCoveredPoints <= 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered point for section point %" PetscInt_FMT, q);
        if (numCoveredPoints) p = coveredPoints[0];
        else p = -2;
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q - start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(PETSC_SUCCESS);
}
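
/* Collapse an IS of expanded vertices back to faces by applying
   DMPlexExpandedConesToFaces_Private() once per depth level, walking the cone
   sections (as produced by DMPlexGetConeRecursive()) from the deepest level upwards */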
static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
{
  PetscInt d;
  IS       is, newis;

  PetscFunctionBegin;
  is = boundary_expanded_is;
  PetscCall(PetscObjectReference((PetscObject)is));
  for (d = 0; d < depth - 1; ++d) {
    PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
    PetscCall(ISDestroy(&is));
    is = newis;
  }
  *boundary_is = is;
  PetscFunctionReturn(PETSC_SUCCESS);
}

#define CHKERRQI(incall, ierr) \
  if (ierr) incall = PETSC_FALSE;
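/* note: CHKERRQI() only clears the reentrancy guard when a call fails; the error code
   itself is deliberately swallowed so a viewing failure does not abort the caller */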

static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE;
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(PETSC_SUCCESS);
  incall = PETSC_TRUE;
  CHKERRQI(incall, PetscOptionsGetViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
  if (flg) {
    CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
    CHKERRQI(incall, DMLabelView(label, viewer));
    CHKERRQI(incall, PetscViewerPopFormat(viewer));
    CHKERRQI(incall, PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
{
  IS tmpis;

  PetscFunctionBegin;
  PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
  if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
  PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
  PetscCall(ISDestroy(&tmpis));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* currently only for simple PetscSection without fields or constraints */
static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
{
  PetscSection sec;
  PetscInt     chart[2], p;
  PetscInt    *dofarr;
  PetscMPIInt  rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank == rootrank) PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
  PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(chart[1] - chart[0], &dofarr));
  if (rank == rootrank) {
    for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]]));
  }
  PetscCallMPI(MPI_Bcast(dofarr, chart[1] - chart[0], MPIU_INT, rootrank, comm));
  PetscCall(PetscSectionCreate(comm, &sec));
  PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
  for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionSetDof(sec, p, dofarr[p - chart[0]]));
  PetscCall(PetscSectionSetUp(sec));
  PetscCall(PetscFree(dofarr));
  *secout = sec;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
{
  IS faces_expanded_is;

  PetscFunctionBegin;
  PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
  PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
  PetscCall(ISDestroy(&faces_expanded_is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions     options = NULL;
  const char      *prefix  = NULL;
  const char       opt[]   = "-dm_plex_interpolate_orient_interfaces";
  char             prefix_opt[512];
  PetscBool        flg, set;
  static PetscBool wasSetTrue = PETSC_FALSE;

  PetscFunctionBegin;
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  PetscCall(PetscStrncpy(prefix_opt, "-", sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheck(!set || flg == enable, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* get coordinate description of the whole-domain boundary */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                    *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0;
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg, comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec        tmp;
    VecScatter sc;
    IS         xis;
    PetscInt   n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n * size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d = 0; d < bnd->depth; d++) PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));

  if (rank == rootrank) PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* get faces of inter-partition interface */
static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
{
  MPI_Comm               comm;
  DMLabel                label;
  IS                     part_boundary_faces_is;
  const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  /* get ipdm partition boundary (partBoundary) */
  {
    PetscSF sf;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
    PetscCall(DMAddLabel(ipdm, label));
    PetscCall(DMGetPointSF(ipdm, &sf));
    PetscCall(DMSetPointSF(ipdm, NULL));
    PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
    PetscCall(DMSetPointSF(ipdm, sf));
    PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
    PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
    PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
    PetscCall(DMLabelDestroy(&label));
  }

  /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
  PetscCall(ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is));
  PetscCall(ISDestroy(&part_boundary_faces_is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* compute inter-partition interface including edges and vertices */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
{
  DMLabel                label;
  PetscInt               value           = 1;
  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
  PetscCall(DMAddLabel(ipdm, label));
  PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
  PetscCall(DMPlexLabelComplete(ipdm, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
  PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
  PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
  PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
  PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(PETSC_SUCCESS);
}
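
/* In the PointSF, the leaf points are the points this rank references on other ranks
   ("outward"), while the multi-root original numbering recovers the root points that
   other ranks reference on this rank ("inward"); the two helpers below gather each
   set as an IS */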
static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
{
  PetscInt        n;
  const PetscInt *arr;

  PetscFunctionBegin;
  PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
{
  PetscInt        n;
  const PetscInt *rootdegree;
  PetscInt       *arr;

  PetscFunctionBegin;
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
  PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
{
  IS pointSF_out_is, pointSF_in_is;

  PetscFunctionBegin;
  PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
  PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
  PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
  PetscCall(ISDestroy(&pointSF_out_is));
  PetscCall(ISDestroy(&pointSF_in_is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#define CHKERRMY(ierr) PetscCheck(!ierr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")

static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar    *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i = 0; i < npoints; i++) {
    p = points[i];
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      PetscInt n, o;
      char     coordstr[128];

      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
    } else {
      char entityType[16];

      switch (depth) {
      case 1:
        PetscCall(PetscStrncpy(entityType, "edge", sizeof(entityType)));
        break;
      case 2:
        PetscCall(PetscStrncpy(entityType, "face", sizeof(entityType)));
        break;
      case 3:
        PetscCall(PetscStrncpy(entityType, "cell", sizeof(entityType)));
        break;
      default:
        SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      if (depth == dim && dim < 3) PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      const PetscInt *cone;
      IS              coneIS;

      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
{
  PetscBool   flg;
  PetscInt    npoints;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
  PetscCall(PetscViewerASCIIPushSynchronized(v));
  PetscCall(ISGetLocalSize(points, &npoints));
  if (npoints) {
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
    PetscCall(ViewPointsWithType_Internal(dm, points, v));
  }
  PetscCall(PetscViewerFlush(v));
  PetscCall(PetscViewerASCIIPopSynchronized(v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1388: static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
1389: {
1390: PetscSF pointsf;
1391: IS pointsf_is;
1392: PetscBool flg;
1393: MPI_Comm comm;
1394: PetscMPIInt size;
1396: PetscFunctionBegin;
1397: PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1398: PetscCallMPI(MPI_Comm_size(comm, &size));
1399: PetscCall(DMGetPointSF(ipdm, &pointsf));
1400: if (pointsf) {
1401: PetscInt nroots;
1402: PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
1403: if (nroots < 0) pointsf = NULL; /* uninitialized SF */
1404: }
1405: if (!pointsf) {
1406: PetscInt N = 0;
1407: if (interface_is) PetscCall(ISGetSize(interface_is, &N));
1408: PetscCheck(!N, comm, PETSC_ERR_PLIB, "interface_is must be NULL or empty when PointSF is NULL");
1409: PetscFunctionReturn(PETSC_SUCCESS);
1410: }
1412: /* get PointSF points as IS pointsf_is */
1413: PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));
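/* here "interface points" means the union of inward and outward points of the PointSF,
   cf. step 7 in the notes of DMPlexCheckPointSFHeavy() below */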
1415: /* compare pointsf_is with interface_is */
1416: PetscCall(ISEqual(interface_is, pointsf_is, &flg));
1417: PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm));
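/* the MPI_LAND reduction makes flg a global verdict, so either all ranks pass
   or all ranks enter the error branch below together */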
1418: if (!flg) {
1419: IS pointsf_extra_is, pointsf_missing_is;
1420: PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
1421: CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
1422: CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
1423: CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
1424: CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
1425: CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
1426: CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
1427: CHKERRMY(ISDestroy(&pointsf_extra_is));
1428: CHKERRMY(ISDestroy(&pointsf_missing_is));
1429: SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
1430: }
1431: PetscCall(ISDestroy(&pointsf_is));
1432: PetscFunctionReturn(PETSC_SUCCESS);
1433: }
1435: /* remove faces & edges from the point IS, leave just vertices */
1436: static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1437: {
1438: PetscInt vStart, vEnd;
1439: MPI_Comm comm;
1441: PetscFunctionBegin;
1442: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1443: PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
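/* ISGeneralFilter() keeps only the indices lying in [vStart, vEnd), i.e. the depth-0 (vertex) stratum */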
1444: PetscCall(ISGeneralFilter(points, vStart, vEnd));
1445: PetscFunctionReturn(PETSC_SUCCESS);
1446: }
1448: /*
1449: DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1451: Collective
1453: Input Parameters:
1454: + dm  - The DMPlex object
- bnd - The PortableBoundary describing the whole-domain boundary (see DMPlexGetExpandedBoundary_Private())
1456: Notes:
1457: The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1458: This is a heavy test: it calls DMPlexInterpolate() if the input DM is not yet interpolated, and it needs a representation of the whole-domain boundary (PortableBoundary), which can be obtained only from DMPlexGetExpandedBoundary_Private() (itself involving DMPlexInterpolate() of a sequential DM).
1459: This is mainly intended for debugging/testing purposes.
1461: Algorithm:
1462: 1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1463: 2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1464: 3. the mesh is distributed or loaded in parallel
1465: 4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1466: 5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1467: 6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1468: 7. check that the interface covered by the PointSF (the union of inward and outward points) equals the partition interface on each rank; otherwise print the difference and raise an error
1470: Level: developer
1472: .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1473: */
1474: static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1475: {
1476: DM ipdm = NULL;
1477: IS boundary_faces_is, interface_faces_is, interface_is;
1478: DMPlexInterpolatedFlag intp;
1479: MPI_Comm comm;
1481: PetscFunctionBegin;
1482: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1484: PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1485: if (intp == DMPLEX_INTERPOLATED_FULL) {
1486: ipdm = dm;
1487: } else {
1488: /* create temporary interpolated DM if input DM is not interpolated */
1489: PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1490: PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1491: PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1492: }
1493: PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));
1495: /* recover ipdm whole-domain boundary faces from the expanded vertex coordinates */
1496: PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1497: /* get inter-partition interface faces (interface_faces_is) */
1498: PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1499: /* compute inter-partition interface including edges and vertices (interface_is) */
1500: PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1501: /* destroy intermediate ISs */
1502: PetscCall(ISDestroy(&boundary_faces_is));
1503: PetscCall(ISDestroy(&interface_faces_is));
1505: /* for uninterpolated case, keep just vertices in interface */
1506: if (!intp) {
1507: PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1508: PetscCall(DMDestroy(&ipdm));
1509: }
1511: /* compare PointSF with the interface reconstructed from the portable boundary */
1512: PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1513: PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1514: PetscCall(ISDestroy(&interface_is));
1515: PetscFunctionReturn(PETSC_SUCCESS);
1516: }
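/* A minimal driver sketch for the check above (hypothetical code, not part of this
   test; "serialdm" and "distdm" are illustrative names, and destruction of bnd is
   omitted). The portable boundary is captured once from the serial mesh, the mesh
   is distributed, and the distributed PointSF is then verified against the
   reconstructed interface:

     PortableBoundary bnd;
     DM               distdm;

     PetscCall(DMPlexGetExpandedBoundary_Private(serialdm, &bnd));   <-- steps 1-2: portable whole-domain boundary
     PetscCall(DMPlexDistribute(serialdm, 0, NULL, &distdm));        <-- step 3: distribute
     PetscCall(DMPlexCheckPointSFHeavy(distdm, bnd));                <-- steps 4-7: reconstruct interface & compare with PointSF
*/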
1518: int main(int argc, char **argv)
1519: {
1520: DM dm;
1521: AppCtx user;
1523: PetscFunctionBeginUser;
1524: PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1525: PetscCall(PetscLogStageRegister("create", &stage[0]));
1526: PetscCall(PetscLogStageRegister("distribute", &stage[1]));
1527: PetscCall(PetscLogStageRegister("interpolate", &stage[2]));
1528: PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1529: PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1530: if (user.nPointsToExpand) PetscCall(TestExpandPoints(dm, &user));
1531: if (user.ncoords) {
1532: Vec coords;
1534: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1535: PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1536: PetscCall(VecDestroy(&coords));
1537: }
1538: PetscCall(DMDestroy(&dm));
1539: PetscCall(PetscFinalize());
1540: return 0;
1541: }
1543: /*TEST
1545: testset:
1546: nsize: 2
1547: args: -dm_view ascii::ascii_info_detail
1548: args: -dm_plex_check_all
1549: test:
1550: suffix: 1_tri_dist0
1551: args: -distribute 0 -interpolate {{none create}separate output}
1552: test:
1553: suffix: 1_tri_dist1
1554: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1555: test:
1556: suffix: 1_quad_dist0
1557: args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1558: test:
1559: suffix: 1_quad_dist1
1560: args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1561: test:
1562: suffix: 1_1d_dist1
1563: args: -dim 1 -distribute 1
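    # A concrete invocation matching one instance of the sweeps above
    # (assuming ex18 has been built in the current directory):
    #   mpiexec -n 2 ./ex18 -distribute 1 -interpolate create -dm_view ascii::ascii_info_detail -dm_plex_check_all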
1565: testset:
1566: nsize: 3
1567: args: -testnum 1 -interpolate create
1568: args: -dm_plex_check_all
1569: test:
1570: suffix: 2
1571: args: -dm_view ascii::ascii_info_detail
1572: test:
1573: suffix: 2a
1574: args: -dm_plex_check_cones_conform_on_interfaces_verbose
1575: test:
1576: suffix: 2b
1577: args: -test_expand_points 0,1,2,5,6
1578: test:
1579: suffix: 2c
1580: args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1582: testset:
1583: # the same as testset 1, but in 3D
1584: nsize: 2
1585: args: -dim 3 -dm_view ascii::ascii_info_detail
1586: args: -dm_plex_check_all
1587: test:
1588: suffix: 4_tet_dist0
1589: args: -distribute 0 -interpolate {{none create}separate output}
1590: test:
1591: suffix: 4_tet_dist1
1592: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1593: test:
1594: suffix: 4_hex_dist0
1595: args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1596: test:
1597: suffix: 4_hex_dist1
1598: args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1600: test:
1601: # the same as 4_tet_dist0 but test different initial orientations
1602: suffix: 4_tet_test_orient
1603: nsize: 2
1604: args: -dim 3 -distribute 0
1605: args: -dm_plex_check_all
1606: args: -rotate_interface_0 {{0 1 2 11 12 13}}
1607: args: -rotate_interface_1 {{0 1 2 11 12 13}}
1609: testset:
1610: requires: exodusii
1611: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1612: args: -dm_view ascii::ascii_info_detail
1613: args: -dm_plex_check_all
1614: args: -custom_view
1615: test:
1616: suffix: 5_seq
1617: nsize: 1
1618: args: -distribute 0 -interpolate {{none create}separate output}
1619: test:
1620: # Detail viewing of a non-distributed mesh is broken because DMLabelView() is collective, but the label is not shared
1621: suffix: 5_dist0
1622: nsize: 2
1623: args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1624: test:
1625: suffix: 5_dist1
1626: nsize: 2
1627: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1629: testset:
1630: nsize: {{1 2 4}}
1631: args: -use_generator
1632: args: -dm_plex_check_all
1633: args: -distribute -interpolate none
1634: test:
1635: suffix: 6_tri
1636: requires: triangle
1637: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1638: test:
1639: suffix: 6_quad
1640: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1641: test:
1642: suffix: 6_tet
1643: requires: ctetgen
1644: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1645: test:
1646: suffix: 6_hex
1647: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
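  # e.g. one instance of the sweeps above (assuming a local build of ex18):
  #   mpiexec -n 2 ./ex18 -use_generator -dm_plex_check_all -distribute -interpolate none -faces 2,2 -cell_simplex 0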
1648: testset:
1649: nsize: {{1 2 4}}
1650: args: -use_generator
1651: args: -dm_plex_check_all
1652: args: -distribute -interpolate create
1653: test:
1654: suffix: 6_int_tri
1655: requires: triangle
1656: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1657: test:
1658: suffix: 6_int_quad
1659: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1660: test:
1661: suffix: 6_int_tet
1662: requires: ctetgen
1663: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1664: test:
1665: suffix: 6_int_hex
1666: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1667: testset:
1668: nsize: {{2 4}}
1669: args: -use_generator
1670: args: -dm_plex_check_all
1671: args: -distribute -interpolate after_distribute
1672: test:
1673: suffix: 6_parint_tri
1674: requires: triangle
1675: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1676: test:
1677: suffix: 6_parint_quad
1678: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1679: test:
1680: suffix: 6_parint_tet
1681: requires: ctetgen
1682: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1683: test:
1684: suffix: 6_parint_hex
1685: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1687: testset: # 7 EXODUS
1688: requires: exodusii
1689: args: -dm_plex_check_all
1690: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1691: args: -distribute
1692: test: # seq load, simple partitioner
1693: suffix: 7_exo
1694: nsize: {{1 2 4 5}}
1695: args: -interpolate none
1696: test: # seq load, seq interpolation, simple partitioner
1697: suffix: 7_exo_int_simple
1698: nsize: {{1 2 4 5}}
1699: args: -interpolate create
1700: test: # seq load, seq interpolation, metis partitioner
1701: suffix: 7_exo_int_metis
1702: requires: parmetis
1703: nsize: {{2 4 5}}
1704: args: -interpolate create
1705: args: -petscpartitioner_type parmetis
1706: test: # seq load, simple partitioner, par interpolation
1707: suffix: 7_exo_simple_int
1708: nsize: {{2 4 5}}
1709: args: -interpolate after_distribute
1710: test: # seq load, metis partitioner, par interpolation
1711: suffix: 7_exo_metis_int
1712: requires: parmetis
1713: nsize: {{2 4 5}}
1714: args: -interpolate after_distribute
1715: args: -petscpartitioner_type parmetis
1717: testset: # 7 HDF5 SEQUENTIAL LOAD
1718: requires: hdf5 !complex
1719: args: -dm_plex_check_all
1720: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1721: args: -dm_plex_hdf5_force_sequential
1722: args: -distribute
1723: test: # seq load, simple partitioner
1724: suffix: 7_seq_hdf5_simple
1725: nsize: {{1 2 4 5}}
1726: args: -interpolate none
1727: test: # seq load, seq interpolation, simple partitioner
1728: suffix: 7_seq_hdf5_int_simple
1729: nsize: {{1 2 4 5}}
1730: args: -interpolate after_create
1731: test: # seq load, seq interpolation, metis partitioner
1732: nsize: {{2 4 5}}
1733: suffix: 7_seq_hdf5_int_metis
1734: requires: parmetis
1735: args: -interpolate after_create
1736: args: -petscpartitioner_type parmetis
1737: test: # seq load, simple partitioner, par interpolation
1738: suffix: 7_seq_hdf5_simple_int
1739: nsize: {{2 4 5}}
1740: args: -interpolate after_distribute
1741: test: # seq load, metis partitioner, par interpolation
1742: nsize: {{2 4 5}}
1743: suffix: 7_seq_hdf5_metis_int
1744: requires: parmetis
1745: args: -interpolate after_distribute
1746: args: -petscpartitioner_type parmetis
1748: testset: # 7 HDF5 PARALLEL LOAD
1749: requires: hdf5 !complex
1750: nsize: {{2 4 5}}
1751: args: -dm_plex_check_all
1752: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1753: test: # par load
1754: suffix: 7_par_hdf5
1755: args: -interpolate none
1756: test: # par load, par interpolation
1757: suffix: 7_par_hdf5_int
1758: args: -interpolate after_create
1759: test: # par load, parmetis repartitioner
1760: TODO: Parallel partitioning of uninterpolated meshes not supported
1761: suffix: 7_par_hdf5_parmetis
1762: requires: parmetis
1763: args: -distribute -petscpartitioner_type parmetis
1764: args: -interpolate none
1765: test: # par load, par interpolation, parmetis repartitioner
1766: suffix: 7_par_hdf5_int_parmetis
1767: requires: parmetis
1768: args: -distribute -petscpartitioner_type parmetis
1769: args: -interpolate after_create
1770: test: # par load, parmetis partitioner, par interpolation
1771: TODO: Parallel partitioning of uninterpolated meshes not supported
1772: suffix: 7_par_hdf5_parmetis_int
1773: requires: parmetis
1774: args: -distribute -petscpartitioner_type parmetis
1775: args: -interpolate after_distribute
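  # e.g. one instance of the sweep above, parallel load with parallel interpolation:
  #   mpiexec -n 4 ./ex18 -dm_plex_check_all -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf -interpolate after_create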
1777: test:
1778: suffix: 7_hdf5_hierarch
1779: requires: hdf5 ptscotch !complex
1780: nsize: {{2 3 4}separate output}
1781: args: -distribute
1782: args: -interpolate after_create
1783: args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1784: args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1785: args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1787: test:
1788: suffix: 8
1789: requires: hdf5 !complex
1790: nsize: 4
1791: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1792: args: -distribute 0 -interpolate after_create
1793: args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1794: args: -dm_plex_check_all
1795: args: -custom_view
1797: testset: # 9 HDF5 SEQUENTIAL LOAD
1798: requires: hdf5 !complex datafilespath
1799: args: -dm_plex_check_all
1800: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1801: args: -dm_plex_hdf5_force_sequential
1802: args: -distribute
1803: test: # seq load, simple partitioner
1804: suffix: 9_seq_hdf5_simple
1805: nsize: {{1 2 4 5}}
1806: args: -interpolate none
1807: test: # seq load, seq interpolation, simple partitioner
1808: suffix: 9_seq_hdf5_int_simple
1809: nsize: {{1 2 4 5}}
1810: args: -interpolate after_create
1811: test: # seq load, seq interpolation, metis partitioner
1812: nsize: {{2 4 5}}
1813: suffix: 9_seq_hdf5_int_metis
1814: requires: parmetis
1815: args: -interpolate after_create
1816: args: -petscpartitioner_type parmetis
1817: test: # seq load, simple partitioner, par interpolation
1818: suffix: 9_seq_hdf5_simple_int
1819: nsize: {{2 4 5}}
1820: args: -interpolate after_distribute
1821: test: # seq load, simple partitioner, par interpolation
1822: # This is like 9_seq_hdf5_simple_int but tests the error output of DMPlexCheckPointSFHeavy().
1823: # Once 9_seq_hdf5_simple_int gets fixed, this test will break;
1824: # an intentionally broken mesh can then be provided instead.
1825: TODO: This test is broken because PointSF is fixed.
1826: suffix: 9_seq_hdf5_simple_int_err
1827: nsize: 4
1828: args: -interpolate after_distribute
1829: filter: sed -e "/PETSC ERROR/,$$d"
1830: test: # seq load, metis partitioner, par interpolation
1831: nsize: {{2 4 5}}
1832: suffix: 9_seq_hdf5_metis_int
1833: requires: parmetis
1834: args: -interpolate after_distribute
1835: args: -petscpartitioner_type parmetis
1837: testset: # 9 HDF5 PARALLEL LOAD
1838: requires: hdf5 !complex datafilespath
1839: nsize: {{2 4 5}}
1840: args: -dm_plex_check_all
1841: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1842: test: # par load
1843: suffix: 9_par_hdf5
1844: args: -interpolate none
1845: test: # par load, par interpolation
1846: suffix: 9_par_hdf5_int
1847: args: -interpolate after_create
1848: test: # par load, parmetis repartitioner
1849: TODO: Parallel partitioning of uninterpolated meshes not supported
1850: suffix: 9_par_hdf5_parmetis
1851: requires: parmetis
1852: args: -distribute -petscpartitioner_type parmetis
1853: args: -interpolate none
1854: test: # par load, par interpolation, parmetis repartitioner
1855: suffix: 9_par_hdf5_int_parmetis
1856: requires: parmetis
1857: args: -distribute -petscpartitioner_type parmetis
1858: args: -interpolate after_create
1859: test: # par load, parmetis partitioner, par interpolation
1860: TODO: Parallel partitioning of uninterpolated meshes not supported
1861: suffix: 9_par_hdf5_parmetis_int
1862: requires: parmetis
1863: args: -distribute -petscpartitioner_type parmetis
1864: args: -interpolate after_distribute
1866: testset: # 10 HDF5 PARALLEL LOAD
1867: requires: hdf5 !complex datafilespath
1868: nsize: {{2 4 7}}
1869: args: -dm_plex_check_all
1870: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1871: test: # par load, par interpolation
1872: suffix: 10_par_hdf5_int
1873: args: -interpolate after_create
1874: TEST*/