Actual source code: plexexodusii.c
1: #define PETSCDM_DLL
2: #include <petsc/private/dmpleximpl.h>
4: #if defined(PETSC_HAVE_EXODUSII)
5: #include <netcdf.h>
6: #include <exodusII.h>
7: #endif
9: #include <petsc/private/viewerimpl.h>
10: #include <petsc/private/viewerexodusiiimpl.h>
11: #if defined(PETSC_HAVE_EXODUSII)
12: /*@C
13: PETSC_VIEWER_EXODUSII_ - Creates a `PETSCVIEWEREXODUSII` `PetscViewer` shared by all processors in a communicator.
15: Collective; No Fortran Support
17: Input Parameter:
18: . comm - the MPI communicator to share the `PETSCVIEWEREXODUSII` `PetscViewer`
20: Level: intermediate
22: Note:
23: Unlike almost all other PETSc routines, `PETSC_VIEWER_EXODUSII_()` does not return
24: an error code. The ExodusII PetscViewer is usually used in the form
25: $ XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm));
27: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerExodusIIOpen()`, `PetscViewerType`, `PetscViewerCreate()`, `PetscViewerDestroy()`
28: @*/
29: PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm)
30: {
31: PetscViewer viewer;
32: PetscErrorCode ierr;
34: PetscFunctionBegin;
35: ierr = PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer);
36: if (ierr) {
37: ierr = PetscError(PETSC_COMM_SELF, __LINE__, "PETSC_VIEWER_EXODUSII_", __FILE__, PETSC_ERR_PLIB, PETSC_ERROR_INITIAL, " ");
38: PetscFunctionReturn(NULL);
39: }
40: ierr = PetscObjectRegisterDestroy((PetscObject)viewer);
41: if (ierr) {
42: ierr = PetscError(PETSC_COMM_SELF, __LINE__, "PETSC_VIEWER_EXODUSII_", __FILE__, PETSC_ERR_PLIB, PETSC_ERROR_INITIAL, " ");
43: PetscFunctionReturn(NULL);
44: }
45: PetscFunctionReturn(viewer);
46: }
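/*
  Example: a minimal usage sketch for the routine above, assuming `dm` is an existing `DMPLEX`
  defined on PETSC_COMM_WORLD; the shared viewer created here always writes to "mesh.exo".
.vb
  PetscCall(DMView(dm, PETSC_VIEWER_EXODUSII_(PETSC_COMM_WORLD)));
.ve
*/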
48: static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer)
49: {
50: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)v->data;
52: PetscFunctionBegin;
53: if (exo->filename) PetscCall(PetscViewerASCIIPrintf(viewer, "Filename: %s\n", exo->filename));
54: if (exo->exoid) PetscCall(PetscViewerASCIIPrintf(viewer, "exoid: %d\n", exo->exoid));
55: if (exo->btype) PetscCall(PetscViewerASCIIPrintf(viewer, "IO Mode: %d\n", exo->btype));
56: if (exo->order) PetscCall(PetscViewerASCIIPrintf(viewer, "Mesh order: %" PetscInt_FMT "\n", exo->order));
57: PetscFunctionReturn(PETSC_SUCCESS);
58: }
60: static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscViewer v, PetscOptionItems *PetscOptionsObject)
61: {
62: PetscFunctionBegin;
63: PetscOptionsHeadBegin(PetscOptionsObject, "ExodusII PetscViewer Options");
64: PetscOptionsHeadEnd();
65: PetscFunctionReturn(PETSC_SUCCESS);
66: }
68: static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer)
69: {
70: PetscFunctionBegin;
71: PetscFunctionReturn(PETSC_SUCCESS);
72: }
74: static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer)
75: {
76: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
78: PetscFunctionBegin;
79: if (exo->exoid >= 0) PetscCallExternal(ex_close, exo->exoid);
80: PetscCall(PetscFree(exo->filename));
81: PetscCall(PetscFree(exo));
82: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileSetName_C", NULL));
83: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileGetName_C", NULL));
84: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileSetMode_C", NULL));
85: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileGetMode_C", NULL));
86: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerGetId_C", NULL));
87: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerGetOrder_C", NULL));
88: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerSetOrder_C", NULL));
89: PetscFunctionReturn(PETSC_SUCCESS);
90: }
92: static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[])
93: {
94: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
95: PetscMPIInt rank;
96: int CPU_word_size, IO_word_size, EXO_mode;
97: MPI_Info mpi_info = MPI_INFO_NULL;
98: float EXO_version;
100: PetscFunctionBegin;
101: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)viewer), &rank));
102: CPU_word_size = sizeof(PetscReal);
104: IO_word_size = sizeof(PetscReal);
105: if (exo->exoid >= 0) {
106: PetscCallExternal(ex_close, exo->exoid);
107: exo->exoid = -1;
108: }
109: if (exo->filename) PetscCall(PetscFree(exo->filename));
110: PetscCall(PetscStrallocpy(name, &exo->filename));
111: switch (exo->btype) {
112: case FILE_MODE_READ:
113: EXO_mode = EX_READ;
114: break;
115: case FILE_MODE_APPEND:
116: case FILE_MODE_UPDATE:
117: case FILE_MODE_APPEND_UPDATE:
118: /* Will fail if the file does not already exist */
119: EXO_mode = EX_WRITE;
120: break;
121: case FILE_MODE_WRITE:
122: /*
123: exodus only allows writing geometry upon file creation, so we will let DMView create the file.
124: */
125: PetscFunctionReturn(PETSC_SUCCESS);
126: break;
127: default:
128: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
129: }
130: #if defined(PETSC_USE_64BIT_INDICES)
131: EXO_mode += EX_ALL_INT64_API;
132: #endif
133: exo->exoid = ex_open_par(name, EXO_mode, &CPU_word_size, &IO_word_size, &EXO_version, PETSC_COMM_WORLD, mpi_info);
134: PetscCheck(exo->exoid >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", name);
135: PetscFunctionReturn(PETSC_SUCCESS);
136: }
138: static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name)
139: {
140: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
142: PetscFunctionBegin;
143: *name = exo->filename;
144: PetscFunctionReturn(PETSC_SUCCESS);
145: }
147: static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type)
148: {
149: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
151: PetscFunctionBegin;
152: exo->btype = type;
153: PetscFunctionReturn(PETSC_SUCCESS);
154: }
156: static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type)
157: {
158: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
160: PetscFunctionBegin;
161: *type = exo->btype;
162: PetscFunctionReturn(PETSC_SUCCESS);
163: }
165: static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid)
166: {
167: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
169: PetscFunctionBegin;
170: *exoid = exo->exoid;
171: PetscFunctionReturn(PETSC_SUCCESS);
172: }
174: static PetscErrorCode PetscViewerExodusIIGetOrder_ExodusII(PetscViewer viewer, PetscInt *order)
175: {
176: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
178: PetscFunctionBegin;
179: *order = exo->order;
180: PetscFunctionReturn(PETSC_SUCCESS);
181: }
183: static PetscErrorCode PetscViewerExodusIISetOrder_ExodusII(PetscViewer viewer, PetscInt order)
184: {
185: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
187: PetscFunctionBegin;
188: exo->order = order;
189: PetscFunctionReturn(PETSC_SUCCESS);
190: }
192: /*MC
193: PETSCVIEWEREXODUSII - A viewer that writes to an Exodus II file
195: Level: beginner
197: .seealso: `PetscViewerExodusIIOpen()`, `PetscViewerCreate()`, `PETSCVIEWERBINARY`, `PETSCVIEWERHDF5`, `DMView()`,
198: `PetscViewerFileSetName()`, `PetscViewerFileSetMode()`, `PetscViewerFormat`, `PetscViewerType`, `PetscViewerSetType()`
199: M*/
201: PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v)
202: {
203: PetscViewer_ExodusII *exo;
205: PetscFunctionBegin;
206: PetscCall(PetscNew(&exo));
208: v->data = (void *)exo;
209: v->ops->destroy = PetscViewerDestroy_ExodusII;
210: v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII;
211: v->ops->setup = PetscViewerSetUp_ExodusII;
212: v->ops->view = PetscViewerView_ExodusII;
213: v->ops->flush = 0;
214: exo->btype = (PetscFileMode)-1;
215: exo->filename = 0;
216: exo->exoid = -1;
218: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileSetName_C", PetscViewerFileSetName_ExodusII));
219: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileGetName_C", PetscViewerFileGetName_ExodusII));
220: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileSetMode_C", PetscViewerFileSetMode_ExodusII));
221: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileGetMode_C", PetscViewerFileGetMode_ExodusII));
222: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerGetId_C", PetscViewerExodusIIGetId_ExodusII));
223: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerSetOrder_C", PetscViewerExodusIISetOrder_ExodusII));
224: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerGetOrder_C", PetscViewerExodusIIGetOrder_ExodusII));
225: PetscFunctionReturn(PETSC_SUCCESS);
226: }
228: /*
229: EXOGetVarIndex - Locate a result in an exodus file based on its name
231: Collective
233: Input Parameters:
234: + exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
235: . obj_type - the type of entity for instance EX_NODAL, EX_ELEM_BLOCK
236: - name - the name of the result
238: Output Parameter:
239: . varIndex - the location in the exodus file of the result
241: Level: beginner
243: Notes:
244: The exodus variable index is obtained by comparing name and the
245: names of the variables declared in the exodus file. For instance, if name is "V"
246: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
247: amongst all variables of type obj_type.
249: .seealso: `EXOGetVarIndex()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadNodal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
250: */
251: PetscErrorCode EXOGetVarIndex_Internal(int exoid, ex_entity_type obj_type, const char name[], int *varIndex)
252: {
253: int num_vars, i, j;
254: char ext_name[MAX_STR_LENGTH + 1], var_name[MAX_STR_LENGTH + 1];
255: const int num_suffix = 5;
256: char *suffix[5];
257: PetscBool flg;
259: PetscFunctionBegin;
260: suffix[0] = (char *)"";
261: suffix[1] = (char *)"_X";
262: suffix[2] = (char *)"_XX";
263: suffix[3] = (char *)"_1";
264: suffix[4] = (char *)"_11";
266: *varIndex = -1;
267: PetscCallExternal(ex_get_variable_param, exoid, obj_type, &num_vars);
268: for (i = 0; i < num_vars; ++i) {
269: PetscCallExternal(ex_get_variable_name, exoid, obj_type, i + 1, var_name);
270: for (j = 0; j < num_suffix; ++j) {
271: PetscCall(PetscStrncpy(ext_name, name, MAX_STR_LENGTH));
272: PetscCall(PetscStrlcat(ext_name, suffix[j], MAX_STR_LENGTH));
273: PetscCall(PetscStrcasecmp(ext_name, var_name, &flg));
274: if (flg) *varIndex = i + 1;
275: }
276: }
277: PetscFunctionReturn(PETSC_SUCCESS);
278: }
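/*
  Example: a sketch of the name matching performed above, assuming `exoid` refers to an open file in
  which a vector-valued nodal result was declared component by component as "V_X", "V_Y", "V_Z";
  querying "V" then returns the index of the matching component variable, or leaves -1 if none matches.
.vb
  int vIndex = -1;
  PetscCall(EXOGetVarIndex_Internal(exoid, EX_NODAL, "V", &vIndex));
  PetscCheck(vIndex > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No nodal variable matching V");
.ve
*/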
280: /*
281: DMView_PlexExodusII - Write a `DM` to disk in exodus format
283: Collective
285: Input Parameters:
286: + dm - The dm to be written
287: . viewer - an exodusII viewer
289: Level: beginner
291: Notes:
292: Not all DMs can be written to disk this way. For instance, ExodusII assumes that element blocks (mapped to "Cell Sets" labels)
293: consist of sequentially numbered cells. If this is not the case, the exodus file will be corrupted.
295: If `dm` has been distributed, only the part of the `DM` on MPI rank 0 (including "ghost" cells and vertices)
296: will be written.
298: `DMPLEX` only represents geometry, while most post-processing software expects that a mesh also provides information
299: on the discretization space. This function assumes that the file represents Lagrange finite elements of order 1 or 2.
300: The order of the mesh must be set with `PetscViewerExodusIISetOrder()` before calling this function.
301: It should be extended to use `PetscFE` objects.
303: This function will only handle TRI, TET, QUAD, and HEX cells.
305: .seealso: `DMPLEX`
306: */
307: PetscErrorCode DMView_PlexExodusII(DM dm, PetscViewer viewer)
308: {
309: enum ElemType {
310: TRI,
311: QUAD,
312: TET,
313: HEX
314: };
315: MPI_Comm comm;
316: PetscInt degree; /* the order of the mesh */
317: /* Connectivity Variables */
318: PetscInt cellsNotInConnectivity;
319: /* Cell Sets */
320: DMLabel csLabel;
321: IS csIS;
322: const PetscInt *csIdx;
323: PetscInt num_cs, cs;
324: enum ElemType *type;
325: PetscBool hasLabel;
326: /* Coordinate Variables */
327: DM cdm;
328: PetscSection coordSection;
329: Vec coord;
330: PetscInt **nodes;
331: PetscInt depth, d, dim, skipCells = 0;
332: PetscInt pStart, pEnd, p, cStart, cEnd, numCells, vStart, vEnd, numVertices, eStart, eEnd, numEdges, fStart, fEnd, numFaces, numNodes;
333: PetscInt num_vs, num_fs;
334: PetscMPIInt rank, size;
335: const char *dmName;
336: PetscInt nodesTriP1[4] = {3, 0, 0, 0};
337: PetscInt nodesTriP2[4] = {3, 3, 0, 0};
338: PetscInt nodesQuadP1[4] = {4, 0, 0, 0};
339: PetscInt nodesQuadP2[4] = {4, 4, 0, 1};
340: PetscInt nodesTetP1[4] = {4, 0, 0, 0};
341: PetscInt nodesTetP2[4] = {4, 6, 0, 0};
342: PetscInt nodesHexP1[4] = {8, 0, 0, 0};
343: PetscInt nodesHexP2[4] = {8, 12, 6, 1};
344: int CPU_word_size, IO_word_size, EXO_mode;
345: float EXO_version;
347: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
349: PetscFunctionBegin;
350: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
351: PetscCallMPI(MPI_Comm_rank(comm, &rank));
352: PetscCallMPI(MPI_Comm_size(comm, &size));
354: /*
355: Creating coordSection is a collective operation so we do it somewhat out of sequence
356: */
357: PetscCall(PetscSectionCreate(comm, &coordSection));
358: PetscCall(DMGetCoordinatesLocalSetUp(dm));
359: /*
360: Check that all points are on rank 0 since we don't know how to save distributed DM in exodus format
361: */
362: PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
363: PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
364: PetscCall(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd));
365: PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
366: numCells = cEnd - cStart;
367: numEdges = eEnd - eStart;
368: numVertices = vEnd - vStart;
369: PetscCheck(!(rank && (numCells || numEdges || numVertices)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Writing distributed DM in exodusII format not supported");
370: if (rank == 0) {
371: switch (exo->btype) {
372: case FILE_MODE_READ:
373: case FILE_MODE_APPEND:
374: case FILE_MODE_UPDATE:
375: case FILE_MODE_APPEND_UPDATE:
376: /* exodusII does not allow writing geometry to an existing file */
377: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "cannot add geometry to existing file %s", exo->filename);
378: case FILE_MODE_WRITE:
379: /* Create an empty file, clobbering any existing file with the same name */
380: EXO_mode = EX_CLOBBER;
381: #if defined(PETSC_USE_64BIT_INDICES)
382: EXO_mode += EX_ALL_INT64_API;
383: #endif
384: CPU_word_size = sizeof(PetscReal);
385: IO_word_size = sizeof(PetscReal);
386: exo->exoid = ex_create(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size);
387: PetscCheck(exo->exoid >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", exo->filename);
389: break;
390: default:
391: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
392: }
394: /* --- Get DM info --- */
395: PetscCall(PetscObjectGetName((PetscObject)dm, &dmName));
396: PetscCall(DMPlexGetDepth(dm, &depth));
397: PetscCall(DMGetDimension(dm, &dim));
398: PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
399: if (depth == 3) {
400: numFaces = fEnd - fStart;
401: } else {
402: numFaces = 0;
403: }
404: PetscCall(DMGetLabelSize(dm, "Cell Sets", &num_cs));
405: PetscCall(DMGetLabelSize(dm, "Vertex Sets", &num_vs));
406: PetscCall(DMGetLabelSize(dm, "Face Sets", &num_fs));
407: PetscCall(DMGetCoordinatesLocal(dm, &coord));
408: PetscCall(DMGetCoordinateDM(dm, &cdm));
409: if (num_cs > 0) {
410: PetscCall(DMGetLabel(dm, "Cell Sets", &csLabel));
411: PetscCall(DMLabelGetValueIS(csLabel, &csIS));
412: PetscCall(ISGetIndices(csIS, &csIdx));
413: }
414: PetscCall(PetscMalloc1(num_cs, &nodes));
415: /* Set element type for each block and compute total number of nodes */
416: PetscCall(PetscMalloc1(num_cs, &type));
417: numNodes = numVertices;
419: PetscCall(PetscViewerExodusIIGetOrder(viewer, &degree));
420: if (degree == 2) numNodes += numEdges;
421: cellsNotInConnectivity = numCells;
422: for (cs = 0; cs < num_cs; ++cs) {
423: IS stratumIS;
424: const PetscInt *cells;
425: PetscScalar *xyz = NULL;
426: PetscInt csSize, closureSize;
428: PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
429: PetscCall(ISGetIndices(stratumIS, &cells));
430: PetscCall(ISGetSize(stratumIS, &csSize));
431: PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz));
432: switch (dim) {
433: case 2:
434: if (closureSize == 3 * dim) {
435: type[cs] = TRI;
436: } else if (closureSize == 4 * dim) {
437: type[cs] = QUAD;
438: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize / dim, dim);
439: break;
440: case 3:
441: if (closureSize == 4 * dim) {
442: type[cs] = TET;
443: } else if (closureSize == 8 * dim) {
444: type[cs] = HEX;
445: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize / dim, dim);
446: break;
447: default:
448: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %" PetscInt_FMT " not handled by ExodusII viewer", dim);
449: }
450: if ((degree == 2) && (type[cs] == QUAD)) numNodes += csSize;
451: if ((degree == 2) && (type[cs] == HEX)) {
452: numNodes += csSize;
453: numNodes += numFaces;
454: }
455: PetscCall(DMPlexVecRestoreClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz));
456: /* Set nodes and Element type */
457: if (type[cs] == TRI) {
458: if (degree == 1) nodes[cs] = nodesTriP1;
459: else if (degree == 2) nodes[cs] = nodesTriP2;
460: } else if (type[cs] == QUAD) {
461: if (degree == 1) nodes[cs] = nodesQuadP1;
462: else if (degree == 2) nodes[cs] = nodesQuadP2;
463: } else if (type[cs] == TET) {
464: if (degree == 1) nodes[cs] = nodesTetP1;
465: else if (degree == 2) nodes[cs] = nodesTetP2;
466: } else if (type[cs] == HEX) {
467: if (degree == 1) nodes[cs] = nodesHexP1;
468: else if (degree == 2) nodes[cs] = nodesHexP2;
469: }
470: /* Compute the number of cells not in the connectivity table */
471: cellsNotInConnectivity -= nodes[cs][3] * csSize;
473: PetscCall(ISRestoreIndices(stratumIS, &cells));
474: PetscCall(ISDestroy(&stratumIS));
475: }
476: if (num_cs) PetscCallExternal(ex_put_init, exo->exoid, dmName, dim, numNodes, numCells, num_cs, num_vs, num_fs);
477: /* --- Connectivity --- */
478: for (cs = 0; cs < num_cs; ++cs) {
479: IS stratumIS;
480: const PetscInt *cells;
481: PetscInt *connect, off = 0;
482: PetscInt edgesInClosure = 0, facesInClosure = 0, verticesInClosure = 0;
483: PetscInt csSize, c, connectSize, closureSize;
484: char *elem_type = NULL;
485: char elem_type_tri3[] = "TRI3", elem_type_quad4[] = "QUAD4";
486: char elem_type_tri6[] = "TRI6", elem_type_quad9[] = "QUAD9";
487: char elem_type_tet4[] = "TET4", elem_type_hex8[] = "HEX8";
488: char elem_type_tet10[] = "TET10", elem_type_hex27[] = "HEX27";
490: PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
491: PetscCall(ISGetIndices(stratumIS, &cells));
492: PetscCall(ISGetSize(stratumIS, &csSize));
493: /* Set Element type */
494: if (type[cs] == TRI) {
495: if (degree == 1) elem_type = elem_type_tri3;
496: else if (degree == 2) elem_type = elem_type_tri6;
497: } else if (type[cs] == QUAD) {
498: if (degree == 1) elem_type = elem_type_quad4;
499: else if (degree == 2) elem_type = elem_type_quad9;
500: } else if (type[cs] == TET) {
501: if (degree == 1) elem_type = elem_type_tet4;
502: else if (degree == 2) elem_type = elem_type_tet10;
503: } else if (type[cs] == HEX) {
504: if (degree == 1) elem_type = elem_type_hex8;
505: else if (degree == 2) elem_type = elem_type_hex27;
506: }
507: connectSize = nodes[cs][0] + nodes[cs][1] + nodes[cs][2] + nodes[cs][3];
508: PetscCall(PetscMalloc1(PetscMax(27, connectSize) * csSize, &connect));
509: PetscCallExternal(ex_put_block, exo->exoid, EX_ELEM_BLOCK, csIdx[cs], elem_type, csSize, connectSize, 0, 0, 1);
510: /* Find number of vertices, edges, and faces in the closure */
511: verticesInClosure = nodes[cs][0];
512: if (depth > 1) {
513: if (dim == 2) {
514: PetscCall(DMPlexGetConeSize(dm, cells[0], &edgesInClosure));
515: } else if (dim == 3) {
516: PetscInt *closure = NULL;
518: PetscCall(DMPlexGetConeSize(dm, cells[0], &facesInClosure));
519: PetscCall(DMPlexGetTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure));
520: edgesInClosure = closureSize - facesInClosure - 1 - verticesInClosure;
521: PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure));
522: }
523: }
524: /* Get connectivity for each cell */
525: for (c = 0; c < csSize; ++c) {
526: PetscInt *closure = NULL;
527: PetscInt temp, i;
529: PetscCall(DMPlexGetTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure));
530: for (i = 0; i < connectSize; ++i) {
531: if (i < nodes[cs][0]) { /* Vertices */
532: connect[i + off] = closure[(i + edgesInClosure + facesInClosure + 1) * 2] + 1;
533: connect[i + off] -= cellsNotInConnectivity;
534: } else if (i < nodes[cs][0] + nodes[cs][1]) { /* Edges */
535: connect[i + off] = closure[(i - verticesInClosure + facesInClosure + 1) * 2] + 1;
536: if (nodes[cs][2] == 0) connect[i + off] -= numFaces;
537: connect[i + off] -= cellsNotInConnectivity;
538: } else if (i < nodes[cs][0] + nodes[cs][1] + nodes[cs][3]) { /* Cells */
539: connect[i + off] = closure[0] + 1;
540: connect[i + off] -= skipCells;
541: } else if (i < nodes[cs][0] + nodes[cs][1] + nodes[cs][3] + nodes[cs][2]) { /* Faces */
542: connect[i + off] = closure[(i - edgesInClosure - verticesInClosure) * 2] + 1;
543: connect[i + off] -= cellsNotInConnectivity;
544: } else {
545: connect[i + off] = -1;
546: }
547: }
548: /* Tetrahedra are inverted */
549: if (type[cs] == TET) {
550: temp = connect[0 + off];
551: connect[0 + off] = connect[1 + off];
552: connect[1 + off] = temp;
553: if (degree == 2) {
554: temp = connect[5 + off];
555: connect[5 + off] = connect[6 + off];
556: connect[6 + off] = temp;
557: temp = connect[7 + off];
558: connect[7 + off] = connect[8 + off];
559: connect[8 + off] = temp;
560: }
561: }
562: /* Hexahedra are inverted */
563: if (type[cs] == HEX) {
564: temp = connect[1 + off];
565: connect[1 + off] = connect[3 + off];
566: connect[3 + off] = temp;
567: if (degree == 2) {
568: temp = connect[8 + off];
569: connect[8 + off] = connect[11 + off];
570: connect[11 + off] = temp;
571: temp = connect[9 + off];
572: connect[9 + off] = connect[10 + off];
573: connect[10 + off] = temp;
574: temp = connect[16 + off];
575: connect[16 + off] = connect[17 + off];
576: connect[17 + off] = temp;
577: temp = connect[18 + off];
578: connect[18 + off] = connect[19 + off];
579: connect[19 + off] = temp;
581: temp = connect[12 + off];
582: connect[12 + off] = connect[16 + off];
583: connect[16 + off] = temp;
584: temp = connect[13 + off];
585: connect[13 + off] = connect[17 + off];
586: connect[17 + off] = temp;
587: temp = connect[14 + off];
588: connect[14 + off] = connect[18 + off];
589: connect[18 + off] = temp;
590: temp = connect[15 + off];
591: connect[15 + off] = connect[19 + off];
592: connect[19 + off] = temp;
594: temp = connect[23 + off];
595: connect[23 + off] = connect[26 + off];
596: connect[26 + off] = temp;
597: temp = connect[24 + off];
598: connect[24 + off] = connect[25 + off];
599: connect[25 + off] = temp;
600: temp = connect[25 + off];
601: connect[25 + off] = connect[26 + off];
602: connect[26 + off] = temp;
603: }
604: }
605: off += connectSize;
606: PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure));
607: }
608: PetscCallExternal(ex_put_conn, exo->exoid, EX_ELEM_BLOCK, csIdx[cs], connect, 0, 0);
609: skipCells += (nodes[cs][3] == 0) * csSize;
610: PetscCall(PetscFree(connect));
611: PetscCall(ISRestoreIndices(stratumIS, &cells));
612: PetscCall(ISDestroy(&stratumIS));
613: }
614: PetscCall(PetscFree(type));
615: /* --- Coordinates --- */
616: PetscCall(PetscSectionSetChart(coordSection, pStart, pEnd));
617: if (num_cs) {
618: for (d = 0; d < depth; ++d) {
619: PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
620: for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(coordSection, p, nodes[0][d] > 0));
621: }
622: }
623: for (cs = 0; cs < num_cs; ++cs) {
624: IS stratumIS;
625: const PetscInt *cells;
626: PetscInt csSize, c;
628: PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
629: PetscCall(ISGetIndices(stratumIS, &cells));
630: PetscCall(ISGetSize(stratumIS, &csSize));
631: for (c = 0; c < csSize; ++c) PetscCall(PetscSectionSetDof(coordSection, cells[c], nodes[cs][3] > 0));
632: PetscCall(ISRestoreIndices(stratumIS, &cells));
633: PetscCall(ISDestroy(&stratumIS));
634: }
635: if (num_cs) {
636: PetscCall(ISRestoreIndices(csIS, &csIdx));
637: PetscCall(ISDestroy(&csIS));
638: }
639: PetscCall(PetscFree(nodes));
640: PetscCall(PetscSectionSetUp(coordSection));
641: if (numNodes) {
642: const char *coordNames[3] = {"x", "y", "z"};
643: PetscScalar *closure, *cval;
644: PetscReal *coords;
645: PetscInt hasDof, n = 0;
647: /* There can't be more than 24 values in the closure of a point for the coordinate section */
648: PetscCall(PetscCalloc3(numNodes * 3, &coords, dim, &cval, 24, &closure));
649: PetscCall(DMGetCoordinatesLocalNoncollective(dm, &coord));
650: PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
651: for (p = pStart; p < pEnd; ++p) {
652: PetscCall(PetscSectionGetDof(coordSection, p, &hasDof));
653: if (hasDof) {
654: PetscInt closureSize = 24, j;
656: PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, p, &closureSize, &closure));
657: for (d = 0; d < dim; ++d) {
658: cval[d] = 0.0;
659: for (j = 0; j < closureSize / dim; j++) cval[d] += closure[j * dim + d];
660: coords[d * numNodes + n] = PetscRealPart(cval[d]) * dim / closureSize;
661: }
662: ++n;
663: }
664: }
665: PetscCallExternal(ex_put_coord, exo->exoid, &coords[0 * numNodes], &coords[1 * numNodes], &coords[2 * numNodes]);
666: PetscCall(PetscFree3(coords, cval, closure));
667: PetscCallExternal(ex_put_coord_names, exo->exoid, (char **)coordNames);
668: }
670: /* --- Node Sets/Vertex Sets --- */
671: PetscCall(DMHasLabel(dm, "Vertex Sets", &hasLabel));
672: if (hasLabel) {
673: PetscInt i, vs, vsSize;
674: const PetscInt *vsIdx, *vertices;
675: PetscInt *nodeList;
676: IS vsIS, stratumIS;
677: DMLabel vsLabel;
678: PetscCall(DMGetLabel(dm, "Vertex Sets", &vsLabel));
679: PetscCall(DMLabelGetValueIS(vsLabel, &vsIS));
680: PetscCall(ISGetIndices(vsIS, &vsIdx));
681: for (vs = 0; vs < num_vs; ++vs) {
682: PetscCall(DMLabelGetStratumIS(vsLabel, vsIdx[vs], &stratumIS));
683: PetscCall(ISGetIndices(stratumIS, &vertices));
684: PetscCall(ISGetSize(stratumIS, &vsSize));
685: PetscCall(PetscMalloc1(vsSize, &nodeList));
686: for (i = 0; i < vsSize; ++i) nodeList[i] = vertices[i] - skipCells + 1;
687: PetscCallExternal(ex_put_set_param, exo->exoid, EX_NODE_SET, vsIdx[vs], vsSize, 0);
688: PetscCallExternal(ex_put_set, exo->exoid, EX_NODE_SET, vsIdx[vs], nodeList, NULL);
689: PetscCall(ISRestoreIndices(stratumIS, &vertices));
690: PetscCall(ISDestroy(&stratumIS));
691: PetscCall(PetscFree(nodeList));
692: }
693: PetscCall(ISRestoreIndices(vsIS, &vsIdx));
694: PetscCall(ISDestroy(&vsIS));
695: }
696: /* --- Side Sets/Face Sets --- */
697: PetscCall(DMHasLabel(dm, "Face Sets", &hasLabel));
698: if (hasLabel) {
699: PetscInt i, j, fs, fsSize;
700: const PetscInt *fsIdx, *faces;
701: IS fsIS, stratumIS;
702: DMLabel fsLabel;
703: PetscInt numPoints, *points;
704: PetscInt elem_list_size = 0;
705: PetscInt *elem_list, *elem_ind, *side_list;
707: PetscCall(DMGetLabel(dm, "Face Sets", &fsLabel));
708: /* Compute size of Node List and Element List */
709: PetscCall(DMLabelGetValueIS(fsLabel, &fsIS));
710: PetscCall(ISGetIndices(fsIS, &fsIdx));
711: for (fs = 0; fs < num_fs; ++fs) {
712: PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS));
713: PetscCall(ISGetSize(stratumIS, &fsSize));
714: elem_list_size += fsSize;
715: PetscCall(ISDestroy(&stratumIS));
716: }
717: if (num_fs) {
718: PetscCall(PetscMalloc3(num_fs, &elem_ind, elem_list_size, &elem_list, elem_list_size, &side_list));
719: elem_ind[0] = 0;
720: for (fs = 0; fs < num_fs; ++fs) {
721: PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS));
722: PetscCall(ISGetIndices(stratumIS, &faces));
723: PetscCall(ISGetSize(stratumIS, &fsSize));
724: /* Set Parameters */
725: PetscCallExternal(ex_put_set_param, exo->exoid, EX_SIDE_SET, fsIdx[fs], fsSize, 0);
726: /* Indices */
727: if (fs < num_fs - 1) elem_ind[fs + 1] = elem_ind[fs] + fsSize;
729: for (i = 0; i < fsSize; ++i) {
730: /* Element List */
731: points = NULL;
732: PetscCall(DMPlexGetTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points));
733: elem_list[elem_ind[fs] + i] = points[2] + 1;
734: PetscCall(DMPlexRestoreTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points));
736: /* Side List */
737: points = NULL;
738: PetscCall(DMPlexGetTransitiveClosure(dm, elem_list[elem_ind[fs] + i] - 1, PETSC_TRUE, &numPoints, &points));
739: for (j = 1; j < numPoints; ++j) {
740: if (points[j * 2] == faces[i]) break;
741: }
742: /* Convert HEX sides */
743: if (numPoints == 27) {
744: if (j == 1) {
745: j = 5;
746: } else if (j == 2) {
747: j = 6;
748: } else if (j == 3) {
749: j = 1;
750: } else if (j == 4) {
751: j = 3;
752: } else if (j == 5) {
753: j = 2;
754: } else if (j == 6) {
755: j = 4;
756: }
757: }
758: /* Convert TET sides */
759: if (numPoints == 15) {
760: --j;
761: if (j == 0) j = 4;
762: }
763: side_list[elem_ind[fs] + i] = j;
764: PetscCall(DMPlexRestoreTransitiveClosure(dm, elem_list[elem_ind[fs] + i] - 1, PETSC_TRUE, &numPoints, &points));
765: }
766: PetscCall(ISRestoreIndices(stratumIS, &faces));
767: PetscCall(ISDestroy(&stratumIS));
768: }
769: PetscCall(ISRestoreIndices(fsIS, &fsIdx));
770: PetscCall(ISDestroy(&fsIS));
772: /* Put side sets */
773: for (fs = 0; fs < num_fs; ++fs) PetscCallExternal(ex_put_set, exo->exoid, EX_SIDE_SET, fsIdx[fs], &elem_list[elem_ind[fs]], &side_list[elem_ind[fs]]);
774: PetscCall(PetscFree3(elem_ind, elem_list, side_list));
775: }
776: }
777: /*
778: close the exodus file
779: */
780: ex_close(exo->exoid);
781: exo->exoid = -1;
782: }
783: PetscCall(PetscSectionDestroy(&coordSection));
785: /*
786: reopen the file in parallel
787: */
788: EXO_mode = EX_WRITE;
789: #if defined(PETSC_USE_64BIT_INDICES)
790: EXO_mode += EX_ALL_INT64_API;
791: #endif
792: CPU_word_size = sizeof(PetscReal);
793: IO_word_size = sizeof(PetscReal);
794: exo->exoid = ex_open_par(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size, &EXO_version, comm, MPI_INFO_NULL);
795: PetscCheck(exo->exoid >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", exo->filename);
796: PetscFunctionReturn(PETSC_SUCCESS);
797: }
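/*
  Example: a sketch of the write sequence this function expects, assuming `dm` is an undistributed
  `DMPLEX` made of TRI, QUAD, TET, or HEX cells; the file name "mesh.exo" and the order 2 (P2 Lagrange)
  are illustrative choices.
.vb
  PetscViewer viewer;
  PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD, "mesh.exo", FILE_MODE_WRITE, &viewer));
  PetscCall(PetscViewerExodusIISetOrder(viewer, 2));
  PetscCall(DMView(dm, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
.ve
*/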
799: /*
800: VecView_PlexExodusII_Internal - Write a Vec to an exodus file
802: Collective
804: Input Parameters:
805: + v - The vector to be written
806: - viewer - The PetscViewerExodusII viewer associated with an exodus file
808: Level: beginner
810: Notes:
811: The exodus result variable index is obtained by comparing the Vec name and the
812: names of variables declared in the exodus file. For instance for a Vec named "V"
813: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
814: amongst all variables.
815: In the event that both a nodal and a zonal variable match, the function will return an error instead of
816: possibly corrupting the file.
818: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII()`, `VecView_PlexExodusII()`
819: */
820: PetscErrorCode VecView_PlexExodusII_Internal(Vec v, PetscViewer viewer)
821: {
822: DM dm;
823: MPI_Comm comm;
824: PetscMPIInt rank;
826: int exoid, offsetN = 0, offsetZ = 0;
827: const char *vecname;
828: PetscInt step;
830: PetscFunctionBegin;
831: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
832: PetscCallMPI(MPI_Comm_rank(comm, &rank));
833: PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
834: PetscCall(VecGetDM(v, &dm));
835: PetscCall(PetscObjectGetName((PetscObject)v, &vecname));
837: PetscCall(DMGetOutputSequenceNumber(dm, &step, NULL));
838: PetscCall(EXOGetVarIndex_Internal(exoid, EX_NODAL, vecname, &offsetN));
839: PetscCall(EXOGetVarIndex_Internal(exoid, EX_ELEM_BLOCK, vecname, &offsetZ));
840: PetscCheck(!(offsetN > 0 && offsetZ > 0), comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname);
841: if (offsetN > 0) {
842: PetscCall(VecViewPlex_ExodusII_Nodal_Internal(v, exoid, (int)step + 1, offsetN));
843: } else if (offsetZ > 0) {
844: PetscCall(VecViewPlex_ExodusII_Zonal_Internal(v, exoid, (int)step + 1, offsetZ));
845: } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname);
846: PetscFunctionReturn(PETSC_SUCCESS);
847: }
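/*
  Example: a sketch of the user-facing calls that reach VecView_PlexExodusII_Internal(), assuming the
  mesh was already written to `viewer` with DMView() and a nodal variable named "U" was declared in
  the file; `u` is a global Vec created from the same DM, and `step` and `time` are placeholder values.
.vb
  PetscCall(PetscObjectSetName((PetscObject)u, "U"));
  PetscCall(DMSetOutputSequenceNumber(dm, step, time));
  PetscCall(VecView(u, viewer));
.ve
*/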
849: /*
850: VecLoad_PlexExodusII_Internal - Read a Vec from an exodus file
852: Collective
854: Input Parameters:
855: + v - The vector to be read
856: - viewer - The PetscViewerExodusII viewer associated with an exodus file
858: Level: beginner
860: Notes:
861: The exodus result variable index is obtained by comparing the Vec name and the
862: names of variables declared in the exodus file. For instance for a Vec named "V"
863: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
864: amongst all variables.
865: In the event that both a nodal and a zonal variable match, the function will return an error instead of
866: possibly corrupting the file.
868: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII()`, `VecView_PlexExodusII()`
869: */
870: PetscErrorCode VecLoad_PlexExodusII_Internal(Vec v, PetscViewer viewer)
871: {
872: DM dm;
873: MPI_Comm comm;
874: PetscMPIInt rank;
876: int exoid, offsetN = 0, offsetZ = 0;
877: const char *vecname;
878: PetscInt step;
880: PetscFunctionBegin;
881: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
882: PetscCallMPI(MPI_Comm_rank(comm, &rank));
883: PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
884: PetscCall(VecGetDM(v, &dm));
885: PetscCall(PetscObjectGetName((PetscObject)v, &vecname));
887: PetscCall(DMGetOutputSequenceNumber(dm, &step, NULL));
888: PetscCall(EXOGetVarIndex_Internal(exoid, EX_NODAL, vecname, &offsetN));
889: PetscCall(EXOGetVarIndex_Internal(exoid, EX_ELEM_BLOCK, vecname, &offsetZ));
890: PetscCheck(!(offsetN > 0 && offsetZ > 0), comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname);
891: if (offsetN > 0) PetscCall(VecLoadPlex_ExodusII_Nodal_Internal(v, exoid, (int)step + 1, offsetN));
892: else if (offsetZ > 0) PetscCall(VecLoadPlex_ExodusII_Zonal_Internal(v, exoid, (int)step + 1, offsetZ));
893: else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname);
894: PetscFunctionReturn(PETSC_SUCCESS);
895: }
897: /*
898: VecViewPlex_ExodusII_Nodal_Internal - Write a Vec corresponding to a nodal field to an exodus file
900: Collective
902: Input Parameters:
903: + v - The vector to be written
904: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
905: . step - the time step to write at (exodus steps are numbered starting from 1)
906: - offset - the location of the variable in the file
908: Level: beginner
910: Notes:
911: The exodus result nodal variable index is obtained by comparing the Vec name and the
912: names of nodal variables declared in the exodus file. For instance for a Vec named "V"
913: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
914: amongst all nodal variables.
916: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecLoadNodal_PlexEXO()`, `VecViewZonal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
917: */
918: PetscErrorCode VecViewPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
919: {
920: MPI_Comm comm;
921: PetscMPIInt size;
922: DM dm;
923: Vec vNatural, vComp;
924: const PetscScalar *varray;
925: PetscInt xs, xe, bs;
926: PetscBool useNatural;
928: PetscFunctionBegin;
929: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
930: PetscCallMPI(MPI_Comm_size(comm, &size));
931: PetscCall(VecGetDM(v, &dm));
932: PetscCall(DMGetUseNatural(dm, &useNatural));
933: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
934: if (useNatural) {
935: PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
936: PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
937: PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
938: } else {
939: vNatural = v;
940: }
942: /* Write local chunk of the result in the exodus file
943: exodus stores each component of a vector-valued field as a separate variable.
944: We assume that they are stored sequentially */
945: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
946: PetscCall(VecGetBlockSize(vNatural, &bs));
947: if (bs == 1) {
948: PetscCall(VecGetArrayRead(vNatural, &varray));
949: PetscCallExternal(ex_put_partial_var, exoid, step, EX_NODAL, offset, 1, xs + 1, xe - xs, varray);
950: PetscCall(VecRestoreArrayRead(vNatural, &varray));
951: } else {
952: IS compIS;
953: PetscInt c;
955: PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
956: for (c = 0; c < bs; ++c) {
957: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
958: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
959: PetscCall(VecGetArrayRead(vComp, &varray));
960: PetscCallExternal(ex_put_partial_var, exoid, step, EX_NODAL, offset + c, 1, xs / bs + 1, (xe - xs) / bs, varray);
961: PetscCall(VecRestoreArrayRead(vComp, &varray));
962: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
963: }
964: PetscCall(ISDestroy(&compIS));
965: }
966: if (useNatural) PetscCall(VecDestroy(&vNatural));
967: PetscFunctionReturn(PETSC_SUCCESS);
968: }
970: /*
971: VecLoadPlex_ExodusII_Nodal_Internal - Read a Vec corresponding to a nodal field from an exodus file
973: Collective
975: Input Parameters:
976: + v - The vector to be read
977: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
978: . step - the time step to read at (exodus steps are numbered starting from 1)
979: - offset - the location of the variable in the file
981: Level: beginner
983: Notes:
984: The exodus result nodal variable index is obtained by comparing the Vec name and the
985: names of nodal variables declared in the exodus file. For instance for a Vec named "V"
986: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
987: amongst all nodal variables.
989: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecViewZonal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
990: */
991: PetscErrorCode VecLoadPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
992: {
993: MPI_Comm comm;
994: PetscMPIInt size;
995: DM dm;
996: Vec vNatural, vComp;
997: PetscScalar *varray;
998: PetscInt xs, xe, bs;
999: PetscBool useNatural;
1001: PetscFunctionBegin;
1002: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
1003: PetscCallMPI(MPI_Comm_size(comm, &size));
1004: PetscCall(VecGetDM(v, &dm));
1005: PetscCall(DMGetUseNatural(dm, &useNatural));
1006: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1007: if (useNatural) PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
1008: else vNatural = v;
1010: /* Read local chunk from the file */
1011: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
1012: PetscCall(VecGetBlockSize(vNatural, &bs));
1013: if (bs == 1) {
1014: PetscCall(VecGetArray(vNatural, &varray));
1015: PetscCallExternal(ex_get_partial_var, exoid, step, EX_NODAL, offset, 1, xs + 1, xe - xs, varray);
1016: PetscCall(VecRestoreArray(vNatural, &varray));
1017: } else {
1018: IS compIS;
1019: PetscInt c;
1021: PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
1022: for (c = 0; c < bs; ++c) {
1023: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
1024: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
1025: PetscCall(VecGetArray(vComp, &varray));
1026: PetscCallExternal(ex_get_partial_var, exoid, step, EX_NODAL, offset + c, 1, xs / bs + 1, (xe - xs) / bs, varray);
1027: PetscCall(VecRestoreArray(vComp, &varray));
1028: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
1029: }
1030: PetscCall(ISDestroy(&compIS));
1031: }
1032: if (useNatural) {
1033: PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
1034: PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
1035: PetscCall(VecDestroy(&vNatural));
1036: }
1037: PetscFunctionReturn(PETSC_SUCCESS);
1038: }
1040: /*
1041: VecViewPlex_ExodusII_Zonal_Internal - Write a Vec corresponding to a zonal (cell based) field to an exodus file
1043: Collective
1045: Input Parameters:
1046: + v - The vector to be written
1047: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1048: . step - the time step to write at (exodus steps are numbered starting from 1)
1049: - offset - the location of the variable in the file
1051: Level: beginner
1053: Notes:
1054: The exodus result zonal variable index is obtained by comparing the Vec name and the
1055: names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1056: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1057: amongst all zonal variables.
1059: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Zonal_Internal()`
1060: */
1061: PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
1062: {
1063: MPI_Comm comm;
1064: PetscMPIInt size;
1065: DM dm;
1066: Vec vNatural, vComp;
1067: const PetscScalar *varray;
1068: PetscInt xs, xe, bs;
1069: PetscBool useNatural;
1070: IS compIS;
1071: PetscInt *csSize, *csID;
1072: PetscInt numCS, set, csxs = 0;
1074: PetscFunctionBegin;
1075: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
1076: PetscCallMPI(MPI_Comm_size(comm, &size));
1077: PetscCall(VecGetDM(v, &dm));
1078: PetscCall(DMGetUseNatural(dm, &useNatural));
1079: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1080: if (useNatural) {
1081: PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
1082: PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
1083: PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
1084: } else {
1085: vNatural = v;
1086: }
1088: /* Write local chunk of the result in the exodus file
1089: exodus stores each component of a vector-valued field as a separate variable.
1090: We assume that they are stored sequentially
1091: Zonal variables are accessed one element block at a time, so we loop through the cell sets,
1092: but once the vector has been reordered to natural size, we cannot use the label information
1093: to figure out what to save where. */
1094: numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
1095: PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
1096: PetscCallExternal(ex_get_ids, exoid, EX_ELEM_BLOCK, csID);
1097: for (set = 0; set < numCS; ++set) {
1098: ex_block block;
1100: block.id = csID[set];
1101: block.type = EX_ELEM_BLOCK;
1102: PetscCallExternal(ex_get_block_param, exoid, &block);
1103: csSize[set] = block.num_entry;
1104: }
1105: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
1106: PetscCall(VecGetBlockSize(vNatural, &bs));
1107: if (bs > 1) PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
1108: for (set = 0; set < numCS; set++) {
1109: PetscInt csLocalSize, c;
1111: /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
1112: local slice of zonal values: xs/bs,xm/bs-1
1113: intersection: max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
1114: csLocalSize = PetscMax(0, PetscMin(xe / bs, csxs + csSize[set]) - PetscMax(xs / bs, csxs));
1115: if (bs == 1) {
1116: PetscCall(VecGetArrayRead(vNatural, &varray));
1117: PetscCallExternal(ex_put_partial_var, exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs)]);
1118: PetscCall(VecRestoreArrayRead(vNatural, &varray));
1119: } else {
1120: for (c = 0; c < bs; ++c) {
1121: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
1122: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
1123: PetscCall(VecGetArrayRead(vComp, &varray));
1124: PetscCallExternal(ex_put_partial_var, exoid, step, EX_ELEM_BLOCK, offset + c, csID[set], PetscMax(xs / bs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs / bs)]);
1125: PetscCall(VecRestoreArrayRead(vComp, &varray));
1126: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
1127: }
1128: }
1129: csxs += csSize[set];
1130: }
1131: PetscCall(PetscFree2(csID, csSize));
1132: if (bs > 1) PetscCall(ISDestroy(&compIS));
1133: if (useNatural) PetscCall(VecDestroy(&vNatural));
1134: PetscFunctionReturn(PETSC_SUCCESS);
1135: }
1137: /*
1138: VecLoadPlex_ExodusII_Zonal_Internal - Read a Vec corresponding to a zonal (cell based) field from an exodus file
1140: Collective
1142: Input Parameters:
1143: + v - The vector to be read
1144: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1145: . step - the time step to read at (exodus steps are numbered starting from 1)
1146: - offset - the location of the variable in the file
1148: Level: beginner
1150: Notes:
1151: The exodus result zonal variable index is obtained by comparing the Vec name and the
1152: names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1153: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1154: amongst all zonal variables.
1156: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Zonal_Internal()`
1157: */
1158: PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
1159: {
1160: MPI_Comm comm;
1161: PetscMPIInt size;
1162: DM dm;
1163: Vec vNatural, vComp;
1164: PetscScalar *varray;
1165: PetscInt xs, xe, bs;
1166: PetscBool useNatural;
1167: IS compIS;
1168: PetscInt *csSize, *csID;
1169: PetscInt numCS, set, csxs = 0;
1171: PetscFunctionBegin;
1172: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
1173: PetscCallMPI(MPI_Comm_size(comm, &size));
1174: PetscCall(VecGetDM(v, &dm));
1175: PetscCall(DMGetUseNatural(dm, &useNatural));
1176: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1177: if (useNatural) PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
1178: else vNatural = v;
1180: /* Read local chunk of the result in the exodus file
1181: exodus stores each component of a vector-valued field as a separate variable.
1182: We assume that they are stored sequentially
1183: Zonal variables are accessed one element block at a time, so we loop through the cell sets,
1184: but once the vector has been reordered to natural size, we cannot use the label information
1185: to figure out what to save where. */
1186: numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
1187: PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
1188: PetscCallExternal(ex_get_ids, exoid, EX_ELEM_BLOCK, csID);
1189: for (set = 0; set < numCS; ++set) {
1190: ex_block block;
1192: block.id = csID[set];
1193: block.type = EX_ELEM_BLOCK;
1194: PetscCallExternal(ex_get_block_param, exoid, &block);
1195: csSize[set] = block.num_entry;
1196: }
1197: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
1198: PetscCall(VecGetBlockSize(vNatural, &bs));
1199: if (bs > 1) PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
1200: for (set = 0; set < numCS; ++set) {
1201: PetscInt csLocalSize, c;
1203: /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
1204: local slice of zonal values: xs/bs,xm/bs-1
1205: intersection: max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
1206: csLocalSize = PetscMax(0, PetscMin(xe / bs, csxs + csSize[set]) - PetscMax(xs / bs, csxs));
1207: if (bs == 1) {
1208: PetscCall(VecGetArray(vNatural, &varray));
1209: PetscCallExternal(ex_get_partial_var, exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs)]);
1210: PetscCall(VecRestoreArray(vNatural, &varray));
1211: } else {
1212: for (c = 0; c < bs; ++c) {
1213: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
1214: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
1215: PetscCall(VecGetArray(vComp, &varray));
1216: PetscCallExternal(ex_get_partial_var, exoid, step, EX_ELEM_BLOCK, offset + c, csID[set], PetscMax(xs / bs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs / bs)]);
1217: PetscCall(VecRestoreArray(vComp, &varray));
1218: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
1219: }
1220: }
1221: csxs += csSize[set];
1222: }
1223: PetscCall(PetscFree2(csID, csSize));
1224: if (bs > 1) PetscCall(ISDestroy(&compIS));
1225: if (useNatural) {
1226: PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
1227: PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
1228: PetscCall(VecDestroy(&vNatural));
1229: }
1230: PetscFunctionReturn(PETSC_SUCCESS);
1231: }
1232: #endif
1234: /*@
1235: PetscViewerExodusIIGetId - Get the file id of the `PETSCVIEWEREXODUSII` file
1237: Logically Collective
1239: Input Parameter:
1240: . viewer - the `PetscViewer`
1242: Output Parameter:
1243: . exoid - The ExodusII file id
1245: Level: intermediate
1247: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerFileSetMode()`, `PetscViewerCreate()`, `PetscViewerSetType()`, `PetscViewerBinaryOpen()`
1248: @*/
1249: PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
1250: {
1251: PetscFunctionBegin;
1253: PetscTryMethod(viewer, "PetscViewerGetId_C", (PetscViewer, int *), (viewer, exoid));
1254: PetscFunctionReturn(PETSC_SUCCESS);
1255: }
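/*
  Example: a sketch combining the call above with direct exodusII calls, here to declare a single
  nodal result variable before writing it with VecView(); the variable name "U" and the variable
  count are placeholders.
.vb
  int exoid = -1;
  PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
  PetscCallExternal(ex_put_variable_param, exoid, EX_NODAL, 1);
  PetscCallExternal(ex_put_variable_name, exoid, EX_NODAL, 1, "U");
.ve
*/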
1257: /*@
1258: PetscViewerExodusIISetOrder - Set the order of the mesh elements in the exodusII file.
1260: Collective
1262: Input Parameters:
1263: + viewer - the `PETSCVIEWEREXODUSII` viewer
1264: - order - the element order
1268: Level: beginner
1270: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerExodusIIGetId()`, `PetscViewerExodusIIGetOrder()`, `PetscViewerExodusIISetOrder()`
1271: @*/
1272: PetscErrorCode PetscViewerExodusIISetOrder(PetscViewer viewer, PetscInt order)
1273: {
1274: PetscFunctionBegin;
1276: PetscTryMethod(viewer, "PetscViewerSetOrder_C", (PetscViewer, PetscInt), (viewer, order));
1277: PetscFunctionReturn(PETSC_SUCCESS);
1278: }
1280: /*@
1281: PetscViewerExodusIIGetOrder - Get the order of the mesh elements in the exodusII file.
1283: Collective
1285: Input Parameter:
1286: . viewer - the `PETSCVIEWEREXODUSII` viewer
1287: Output Parameter:
1289: . order - the element order
1291: Level: beginner
1293: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerExodusIIGetId()`, `PetscViewerExodusIIGetOrder()`, `PetscViewerExodusIISetOrder()`
1294: @*/
1295: PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order)
1296: {
1297: PetscFunctionBegin;
1299: PetscTryMethod(viewer, "PetscViewerGetOrder_C", (PetscViewer, PetscInt *), (viewer, order));
1300: PetscFunctionReturn(PETSC_SUCCESS);
1301: }
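/*
  Example: a sketch pairing the two routines above; order 1 writes a vertex-only (P1) mesh while
  order 2 also writes the additional P2 nodes (edge midpoints, plus face and cell centers for QUAD
  and HEX), matching the element types handled by DMView_PlexExodusII().
.vb
  PetscInt order;
  PetscCall(PetscViewerExodusIISetOrder(viewer, 2));
  PetscCall(PetscViewerExodusIIGetOrder(viewer, &order));
.ve
*/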
1303: /*@C
1304: PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.
1306: Collective
1308: Input Parameters:
1309: + comm - MPI communicator
1310: . name - name of file
1311: - type - type of file
1312: .vb
1313: FILE_MODE_WRITE - create new file for binary output
1314: FILE_MODE_READ - open existing file for binary input
1315: FILE_MODE_APPEND - open existing file for binary output
1316: .ve
1318: Output Parameter:
1319: . exo - `PETSCVIEWEREXODUSII` `PetscViewer` for Exodus II input/output to use with the specified file
1321: Level: beginner
1323: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerPushFormat()`, `PetscViewerDestroy()`,
1324: `DMLoad()`, `PetscFileMode`, `PetscViewer`, `PetscViewerSetType()`, `PetscViewerFileSetMode()`, `PetscViewerFileSetName()`
1325: @*/
1326: PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
1327: {
1328: PetscFunctionBegin;
1329: PetscCall(PetscViewerCreate(comm, exo));
1330: PetscCall(PetscViewerSetType(*exo, PETSCVIEWEREXODUSII));
1331: PetscCall(PetscViewerFileSetMode(*exo, type));
1332: PetscCall(PetscViewerFileSetName(*exo, name));
1333: PetscCall(PetscViewerSetFromOptions(*exo));
1334: PetscFunctionReturn(PETSC_SUCCESS);
1335: }
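/*
  Example: a sketch of reopening an existing file to append results, assuming "mesh.exo" was created
  earlier by DMView() and `u` is a named global Vec attached to the same DM.
.vb
  PetscViewer viewer;
  PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD, "mesh.exo", FILE_MODE_APPEND, &viewer));
  PetscCall(VecView(u, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
.ve
*/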
1337: /*@C
1338: DMPlexCreateExodusFromFile - Create a `DMPLEX` mesh from an ExodusII file.
1340: Collective
1342: Input Parameters:
1343: + comm - The MPI communicator
1344: . filename - The name of the ExodusII file
1345: - interpolate - Create faces and edges in the mesh
1347: Output Parameter:
1348: . dm - The `DM` object representing the mesh
1350: Level: beginner
1352: .seealso: [](ch_unstructured), `DM`, `PETSCVIEWEREXODUSII`, `DMPLEX`, `DMCreate()`, `DMPlexCreateExodus()`
1353: @*/
1354: PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm)
1355: {
1356: PetscMPIInt rank;
1357: #if defined(PETSC_HAVE_EXODUSII)
1358: int CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1;
1359: float version;
1360: #endif
1362: PetscFunctionBegin;
1364: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1365: #if defined(PETSC_HAVE_EXODUSII)
1366: if (rank == 0) {
1367: exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version);
1368: PetscCheck(exoid > 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", filename);
1369: }
1370: PetscCall(DMPlexCreateExodus(comm, exoid, interpolate, dm));
1371: if (rank == 0) PetscCallExternal(ex_close, exoid);
1372: PetscFunctionReturn(PETSC_SUCCESS);
1373: #else
1374: SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1375: #endif
1376: }
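/*
  Example: a sketch of reading and then distributing a mesh, assuming "mesh.exo" exists and PETSc was
  configured with ExodusII support; `dmDist` is NULL when no redistribution is performed.
.vb
  DM dm, dmDist;
  PetscCall(DMPlexCreateExodusFromFile(PETSC_COMM_WORLD, "mesh.exo", PETSC_TRUE, &dm));
  PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
  if (dmDist) {
    PetscCall(DMDestroy(&dm));
    dm = dmDist;
  }
.ve
*/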
1378: #if defined(PETSC_HAVE_EXODUSII)
1379: static PetscErrorCode ExodusGetCellType_Internal(const char *elem_type, DMPolytopeType *ct)
1380: {
1381: PetscBool flg;
1383: PetscFunctionBegin;
1384: *ct = DM_POLYTOPE_UNKNOWN;
1385: PetscCall(PetscStrcmp(elem_type, "TRI", &flg));
1386: if (flg) {
1387: *ct = DM_POLYTOPE_TRIANGLE;
1388: goto done;
1389: }
1390: PetscCall(PetscStrcmp(elem_type, "TRI3", &flg));
1391: if (flg) {
1392: *ct = DM_POLYTOPE_TRIANGLE;
1393: goto done;
1394: }
1395: PetscCall(PetscStrcmp(elem_type, "QUAD", &flg));
1396: if (flg) {
1397: *ct = DM_POLYTOPE_QUADRILATERAL;
1398: goto done;
1399: }
1400: PetscCall(PetscStrcmp(elem_type, "QUAD4", &flg));
1401: if (flg) {
1402: *ct = DM_POLYTOPE_QUADRILATERAL;
1403: goto done;
1404: }
1405: PetscCall(PetscStrcmp(elem_type, "SHELL4", &flg));
1406: if (flg) {
1407: *ct = DM_POLYTOPE_QUADRILATERAL;
1408: goto done;
1409: }
1410: PetscCall(PetscStrcmp(elem_type, "TETRA", &flg));
1411: if (flg) {
1412: *ct = DM_POLYTOPE_TETRAHEDRON;
1413: goto done;
1414: }
1415: PetscCall(PetscStrcmp(elem_type, "TET4", &flg));
1416: if (flg) {
1417: *ct = DM_POLYTOPE_TETRAHEDRON;
1418: goto done;
1419: }
1420: PetscCall(PetscStrcmp(elem_type, "WEDGE", &flg));
1421: if (flg) {
1422: *ct = DM_POLYTOPE_TRI_PRISM;
1423: goto done;
1424: }
1425: PetscCall(PetscStrcmp(elem_type, "HEX", &flg));
1426: if (flg) {
1427: *ct = DM_POLYTOPE_HEXAHEDRON;
1428: goto done;
1429: }
1430: PetscCall(PetscStrcmp(elem_type, "HEX8", &flg));
1431: if (flg) {
1432: *ct = DM_POLYTOPE_HEXAHEDRON;
1433: goto done;
1434: }
1435: PetscCall(PetscStrcmp(elem_type, "HEXAHEDRON", &flg));
1436: if (flg) {
1437: *ct = DM_POLYTOPE_HEXAHEDRON;
1438: goto done;
1439: }
1440: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unrecognized element type %s", elem_type);
1441: done:
1442: PetscFunctionReturn(PETSC_SUCCESS);
1443: }
1444: #endif
1446: /*@
1447: DMPlexCreateExodus - Create a `DMPLEX` mesh from an ExodusII file ID.
1449: Collective
1451: Input Parameters:
1452: + comm - The MPI communicator
1453: . exoid - The ExodusII id associated with an exodus file, obtained using ex_open
1454: - interpolate - Create faces and edges in the mesh
1456: Output Parameter:
1457: . dm - The `DM` object representing the mesh
1459: Level: beginner
1461: .seealso: [](ch_unstructured), `DM`, `PETSCVIEWEREXODUSII`, `DMPLEX`, `DMCreate()`, `DMPlexCreateExodusFromFile()`
1462: @*/
1463: PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool interpolate, DM *dm)
1464: {
1465: #if defined(PETSC_HAVE_EXODUSII)
1466: PetscMPIInt num_proc, rank;
1467: DMLabel cellSets = NULL, faceSets = NULL, vertSets = NULL;
1468: PetscSection coordSection;
1469: Vec coordinates;
1470: PetscScalar *coords;
1471: PetscInt coordSize, v;
1472: /* Read from ex_get_init() */
1473: char title[PETSC_MAX_PATH_LEN + 1];
1474: int dim = 0, dimEmbed = 0, numVertices = 0, numCells = 0;
1475: int num_cs = 0, num_vs = 0, num_fs = 0;
1476: #endif
1478: PetscFunctionBegin;
1479: #if defined(PETSC_HAVE_EXODUSII)
1480: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1481: PetscCallMPI(MPI_Comm_size(comm, &num_proc));
1482: PetscCall(DMCreate(comm, dm));
1483: PetscCall(DMSetType(*dm, DMPLEX));
1484: /* Open EXODUS II file and read basic information on rank 0, then broadcast to all processors */
1485: if (rank == 0) {
1486: PetscCall(PetscMemzero(title, PETSC_MAX_PATH_LEN + 1));
1487: PetscCallExternal(ex_get_init, exoid, title, &dimEmbed, &numVertices, &numCells, &num_cs, &num_vs, &num_fs);
1488: PetscCheck(num_cs, PETSC_COMM_SELF, PETSC_ERR_SUP, "Exodus file does not contain any cell set");
1489: }
1490: PetscCallMPI(MPI_Bcast(title, PETSC_MAX_PATH_LEN + 1, MPI_CHAR, 0, comm));
1491: PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
1492: PetscCall(PetscObjectSetName((PetscObject)*dm, title));
1493: PetscCall(DMPlexSetChart(*dm, 0, numCells + numVertices));
1494: /* We do not want this label computed automatically; instead we compute it here */
1495: PetscCall(DMCreateLabel(*dm, "celltype"));
1497: /* Read cell sets information */
1498: if (rank == 0) {
1499: PetscInt *cone;
1500: int c, cs, ncs, c_loc, v, v_loc;
1501: /* Read from ex_get_elem_blk_ids() */
1502: int *cs_id, *cs_order;
1503: /* Read from ex_get_elem_block() */
1504: char buffer[PETSC_MAX_PATH_LEN + 1];
1505: int num_cell_in_set, num_vertex_per_cell, num_hybrid, num_attr;
1506: /* Read from ex_get_elem_conn() */
1507: int *cs_connect;
1509: /* Get cell sets IDs */
1510: PetscCall(PetscMalloc2(num_cs, &cs_id, num_cs, &cs_order));
1511: PetscCallExternal(ex_get_ids, exoid, EX_ELEM_BLOCK, cs_id);
1512: /* Read the cell set connectivity table and build mesh topology
1513: The ExodusII standard requires that cells in cell sets be numbered sequentially and be pairwise disjoint. */
1514: /* Check for a hybrid mesh: reorder the cell set blocks so that any blocks of prisms come last */
1515: for (cs = 0, num_hybrid = 0; cs < num_cs; ++cs) {
1516: DMPolytopeType ct;
1517: char elem_type[PETSC_MAX_PATH_LEN];
1519: PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
1520: PetscCallExternal(ex_get_elem_type, exoid, cs_id[cs], elem_type);
1521: PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
1522: dim = PetscMax(dim, DMPolytopeTypeGetDim(ct));
1523: PetscCallExternal(ex_get_block, exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1524: switch (ct) {
1525: case DM_POLYTOPE_TRI_PRISM:
1526: cs_order[cs] = cs;
1527: ++num_hybrid;
1528: break;
1529: default:
1530: for (c = cs; c > cs - num_hybrid; --c) cs_order[c] = cs_order[c - 1];
1531: cs_order[cs - num_hybrid] = cs;
1532: }
1533: }
1534: /* First set sizes */
1535: for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1536: DMPolytopeType ct;
1537: char elem_type[PETSC_MAX_PATH_LEN];
1538: const PetscInt cs = cs_order[ncs];
1540: PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
1541: PetscCallExternal(ex_get_elem_type, exoid, cs_id[cs], elem_type);
1542: PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
1543: PetscCallExternal(ex_get_block, exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1544: for (c_loc = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1545: PetscCall(DMPlexSetConeSize(*dm, c, num_vertex_per_cell));
1546: PetscCall(DMPlexSetCellType(*dm, c, ct));
1547: }
1548: }
1549: for (v = numCells; v < numCells + numVertices; ++v) PetscCall(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
1550: PetscCall(DMSetUp(*dm));
1551: for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1552: const PetscInt cs = cs_order[ncs];
1553: PetscCallExternal(ex_get_block, exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1554: PetscCall(PetscMalloc2(num_vertex_per_cell * num_cell_in_set, &cs_connect, num_vertex_per_cell, &cone));
1555: PetscCallExternal(ex_get_conn, exoid, EX_ELEM_BLOCK, cs_id[cs], cs_connect, NULL, NULL);
1556: /* EXO uses Fortran-based (1-based) indexing, while DMPlex uses C-style (0-based) indexing and numbers cells before vertices, so Exodus vertex k becomes DMPlex point numCells + k - 1. */
1557: for (c_loc = 0, v = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1558: DMPolytopeType ct;
1560: for (v_loc = 0; v_loc < num_vertex_per_cell; ++v_loc, ++v) cone[v_loc] = cs_connect[v] + numCells - 1;
1561: PetscCall(DMPlexGetCellType(*dm, c, &ct));
1562: PetscCall(DMPlexInvertCell(ct, cone));
1563: PetscCall(DMPlexSetCone(*dm, c, cone));
1564: PetscCall(DMSetLabelValue_Fast(*dm, &cellSets, "Cell Sets", c, cs_id[cs]));
1565: }
1566: PetscCall(PetscFree2(cs_connect, cone));
1567: }
1568: PetscCall(PetscFree2(cs_id, cs_order));
1569: }
1570: {
1571: PetscInt ints[] = {dim, dimEmbed};
1573: PetscCallMPI(MPI_Bcast(ints, 2, MPIU_INT, 0, comm));
1574: PetscCall(DMSetDimension(*dm, ints[0]));
1575: PetscCall(DMSetCoordinateDim(*dm, ints[1]));
1576: dim = ints[0];
1577: dimEmbed = ints[1];
1578: }
1579: PetscCall(DMPlexSymmetrize(*dm));
1580: PetscCall(DMPlexStratify(*dm));
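/* Optionally build the intermediate faces and edges from the cell-vertex mesh */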
1581: if (interpolate) {
1582: DM idm;
1584: PetscCall(DMPlexInterpolate(*dm, &idm));
1585: PetscCall(DMDestroy(dm));
1586: *dm = idm;
1587: }
1589: /* Create vertex set label */
1590: if (rank == 0 && (num_vs > 0)) {
1591: int vs, v;
1592: /* Read from ex_get_node_set_ids() */
1593: int *vs_id;
1594: /* Read from ex_get_node_set_param() */
1595: int num_vertex_in_set;
1596: /* Read from ex_get_node_set() */
1597: int *vs_vertex_list;
1599: /* Get vertex set ids */
1600: PetscCall(PetscMalloc1(num_vs, &vs_id));
1601: PetscCallExternal(ex_get_ids, exoid, EX_NODE_SET, vs_id);
1602: for (vs = 0; vs < num_vs; ++vs) {
1603: PetscCallExternal(ex_get_set_param, exoid, EX_NODE_SET, vs_id[vs], &num_vertex_in_set, NULL);
1604: PetscCall(PetscMalloc1(num_vertex_in_set, &vs_vertex_list));
1605: PetscCallExternal(ex_get_set, exoid, EX_NODE_SET, vs_id[vs], vs_vertex_list, NULL);
1606: for (v = 0; v < num_vertex_in_set; ++v) PetscCall(DMSetLabelValue_Fast(*dm, &vertSets, "Vertex Sets", vs_vertex_list[v] + numCells - 1, vs_id[vs]));
1607: PetscCall(PetscFree(vs_vertex_list));
1608: }
1609: PetscCall(PetscFree(vs_id));
1610: }
1611: /* Read coordinates */
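/* Coordinates are stored interleaved in the local vector: the dimEmbed components of vertex v
   occupy coords[v*dimEmbed] .. coords[v*dimEmbed + dimEmbed - 1]. */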
1612: PetscCall(DMGetCoordinateSection(*dm, &coordSection));
1613: PetscCall(PetscSectionSetNumFields(coordSection, 1));
1614: PetscCall(PetscSectionSetFieldComponents(coordSection, 0, dimEmbed));
1615: PetscCall(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
1616: for (v = numCells; v < numCells + numVertices; ++v) {
1617: PetscCall(PetscSectionSetDof(coordSection, v, dimEmbed));
1618: PetscCall(PetscSectionSetFieldDof(coordSection, v, 0, dimEmbed));
1619: }
1620: PetscCall(PetscSectionSetUp(coordSection));
1621: PetscCall(PetscSectionGetStorageSize(coordSection, &coordSize));
1622: PetscCall(VecCreate(PETSC_COMM_SELF, &coordinates));
1623: PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
1624: PetscCall(VecSetSizes(coordinates, coordSize, PETSC_DETERMINE));
1625: PetscCall(VecSetBlockSize(coordinates, dimEmbed));
1626: PetscCall(VecSetType(coordinates, VECSTANDARD));
1627: PetscCall(VecGetArray(coordinates, &coords));
1628: if (rank == 0) {
1629: PetscReal *x, *y, *z;
1631: PetscCall(PetscMalloc3(numVertices, &x, numVertices, &y, numVertices, &z));
1632: PetscCallExternal(ex_get_coord, exoid, x, y, z);
1633: if (dimEmbed > 0) {
1634: for (v = 0; v < numVertices; ++v) coords[v * dimEmbed + 0] = x[v];
1635: }
1636: if (dimEmbed > 1) {
1637: for (v = 0; v < numVertices; ++v) coords[v * dimEmbed + 1] = y[v];
1638: }
1639: if (dimEmbed > 2) {
1640: for (v = 0; v < numVertices; ++v) coords[v * dimEmbed + 2] = z[v];
1641: }
1642: PetscCall(PetscFree3(x, y, z));
1643: }
1644: PetscCall(VecRestoreArray(coordinates, &coords));
1645: PetscCall(DMSetCoordinatesLocal(*dm, coordinates));
1646: PetscCall(VecDestroy(&coordinates));
1648: /* Create side set label */
1649: if (rank == 0 && interpolate && (num_fs > 0)) {
1650: int fs, f, voff;
1651: /* Read from ex_get_side_set_ids() */
1652: int *fs_id;
1653: /* Read from ex_get_side_set_param() */
1654: int num_side_in_set;
1655: /* Read from ex_get_side_set_node_list() */
1656: int *fs_vertex_count_list, *fs_vertex_list;
1657: /* Read side set labels */
1658: char fs_name[MAX_STR_LENGTH + 1];
1659: size_t fs_name_len;
1661: /* Get side set ids */
1662: PetscCall(PetscMalloc1(num_fs, &fs_id));
1663: PetscCallExternal(ex_get_ids, exoid, EX_SIDE_SET, fs_id);
1664: for (fs = 0; fs < num_fs; ++fs) {
1665: PetscCallExternal(ex_get_set_param, exoid, EX_SIDE_SET, fs_id[fs], &num_side_in_set, NULL);
1666: PetscCall(PetscMalloc2(num_side_in_set, &fs_vertex_count_list, num_side_in_set * 4, &fs_vertex_list));
1667: PetscCallExternal(ex_get_side_set_node_list, exoid, fs_id[fs], fs_vertex_count_list, fs_vertex_list);
1668: /* Get the specific name associated with this side set ID. */
1669: int fs_name_err = ex_get_name(exoid, EX_SIDE_SET, fs_id[fs], fs_name);
1670: if (!fs_name_err) {
1671: PetscCall(PetscStrlen(fs_name, &fs_name_len));
1672: if (fs_name_len == 0) PetscCall(PetscStrncpy(fs_name, "Face Sets", MAX_STR_LENGTH));
1673: }
1674: for (f = 0, voff = 0; f < num_side_in_set; ++f) {
1675: const PetscInt *faces = NULL;
1676: PetscInt faceSize = fs_vertex_count_list[f], numFaces;
1677: PetscInt faceVertices[4], v;
1679: PetscCheck(faceSize <= 4, comm, PETSC_ERR_ARG_WRONG, "ExodusII side cannot have %" PetscInt_FMT " > 4 vertices", faceSize);
1680: for (v = 0; v < faceSize; ++v, ++voff) faceVertices[v] = fs_vertex_list[voff] + numCells - 1;
1681: PetscCall(DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
1682: PetscCheck(numFaces == 1, comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %" PetscInt_FMT " faces", f, fs, numFaces);
1683: PetscCall(DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs]));
1684: /* Only add the named label if a name was found for this side set. */
1685: if (!fs_name_err) PetscCall(DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs]));
1686: PetscCall(DMPlexRestoreJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
1687: }
1688: PetscCall(PetscFree2(fs_vertex_count_list, fs_vertex_list));
1689: }
1690: PetscCall(PetscFree(fs_id));
1691: }
1693: { /* Create Cell/Face/Vertex Sets labels at all processes */
1694: enum {
1695: n = 3
1696: };
1697: PetscBool flag[n];
1699: flag[0] = cellSets ? PETSC_TRUE : PETSC_FALSE;
1700: flag[1] = faceSets ? PETSC_TRUE : PETSC_FALSE;
1701: flag[2] = vertSets ? PETSC_TRUE : PETSC_FALSE;
1702: PetscCallMPI(MPI_Bcast(flag, n, MPIU_BOOL, 0, comm));
1703: if (flag[0]) PetscCall(DMCreateLabel(*dm, "Cell Sets"));
1704: if (flag[1]) PetscCall(DMCreateLabel(*dm, "Face Sets"));
1705: if (flag[2]) PetscCall(DMCreateLabel(*dm, "Vertex Sets"));
1706: }
1707: PetscFunctionReturn(PETSC_SUCCESS);
1708: #else
1709: SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1710: #endif
1711: }
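/*
  A minimal usage sketch for DMPlexCreateExodus() (illustrative only, not part of this file):
  it assumes a PETSc build configured with ExodusII support, and "mesh.exo" is a placeholder
  file name.  The pattern mirrors DMPlexCreateExodusFromFile() above and then queries the
  "Cell Sets" label created while reading the file.

    DM          dm;
    PetscMPIInt rank;
    PetscInt    numCellSets;
    int         CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1;
    float       version;

    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (rank == 0) exoid = ex_open("mesh.exo", EX_READ, &CPU_word_size, &IO_word_size, &version);
    PetscCall(DMPlexCreateExodus(PETSC_COMM_WORLD, exoid, PETSC_TRUE, &dm));
    if (rank == 0) PetscCallExternal(ex_close, exoid);
    PetscCall(DMGetLabelSize(dm, "Cell Sets", &numCellSets));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of cell sets: %" PetscInt_FMT "\n", numCellSets));
    PetscCall(DMDestroy(&dm));
*/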