Actual source code: gr2.c


/*
   Plots vectors obtained with DMDACreate2d()
*/
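
/*
   Example (a minimal sketch, not part of this file; error handling abbreviated):
   viewing a vector from a 2d DMDA with the draw viewer, which dispatches to
   VecView_MPI_Draw_DA2d() below.

     DM  da;
     Vec x;
     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
     PetscCall(DMSetUp(da));
     PetscCall(DMCreateGlobalVector(da, &x));
     // ... fill x ...
     PetscCall(VecView(x, PETSC_VIEWER_DRAW_WORLD));

   Relevant runtime options handled below: -draw_contour_grid, -draw_contour_axis,
   -draw_coordinates <xmin,ymin,xmax,ymax>, and -draw_ports.
*/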

#include <petsc/private/dmdaimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt        rank;
  PetscInt           m, n, dof, k;
  PetscReal          xmin, xmax, ymin, ymax, min, max;
  const PetscScalar *xy, *v;
  PetscBool          showaxis, showgrid;
  const char        *name0, *name1;
} ZoomCtx;
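
/*
   Layout notes (inferred from the zoom callback below): xy stores interleaved
   node coordinates, so node id has coordinates (xy[2*id], xy[2*id+1]); v stores
   dof-interleaved field values, so field k at node id is v[k + dof*id].
*/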

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw, void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx *)ctx;
  PetscInt           m, n, i, j, k, dof, id, c1, c2, c3, c4;
  PetscReal          min, max, x1, x2, x3, x4, y_1, y2, y3, y4;
  const PetscScalar *xy, *v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* Draw the contour plot patch */
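  /*
     Descriptive note (inferred from the code below): each quadrilateral cell is
     drawn as two color-interpolated triangles that share the diagonal between
     corner 1 (node i,j) and corner 3 (node i+1,j+1).
  */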
  PetscDrawCollectiveBegin(draw);
  for (j = 0; j < n - 1; j++) {
    for (i = 0; i < m - 1; i++) {
      id  = i + j * m;
      x1  = PetscRealPart(xy[2 * id]);
      y_1 = PetscRealPart(xy[2 * id + 1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1;
      x2 = PetscRealPart(xy[2 * id]);
      y2 = PetscRealPart(xy[2 * id + 1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1 + m;
      x3 = PetscRealPart(xy[2 * id]);
      y3 = PetscRealPart(xy[2 * id + 1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + m;
      x4 = PetscRealPart(xy[2 * id]);
      y4 = PetscRealPart(xy[2 * id + 1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      PetscCall(PetscDrawTriangle(draw, x1, y_1, x2, y2, x3, y3, c1, c2, c3));
      PetscCall(PetscDrawTriangle(draw, x1, y_1, x3, y3, x4, y4, c1, c3, c4));
      if (zctx->showgrid) {
        PetscCall(PetscDrawLine(draw, x1, y_1, x2, y2, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x2, y2, x3, y3, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x3, y3, x4, y4, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x4, y4, x1, y_1, PETSC_DRAW_BLACK));
      }
    }
  }
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl, yl, xr, yr, x, y;
      PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
      x  = xl + .30 * (xr - xl);
      xl = xl + .01 * (xr - xl);
      y  = yr - .30 * (yr - yl);
      yl = yl + .01 * (yr - yl);
      if (zctx->name0) PetscCall(PetscDrawString(draw, x, yl, PETSC_DRAW_BLACK, zctx->name0));
      if (zctx->name1) PetscCall(PetscDrawStringVertical(draw, xl, y, PETSC_DRAW_BLACK, zctx->name1));
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16];
      size_t    len;
      PetscReal w;
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmin));
      PetscCall(PetscDrawString(draw, xmin, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmax));
      PetscCall(PetscStrlen(value, &len));
      PetscCall(PetscDrawStringGetSize(draw, &w, NULL));
      PetscCall(PetscDrawString(draw, xmax - len * w, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymin));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymin, PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymax));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymax, PETSC_DRAW_BLACK, value));
    }
  }
  PetscDrawCollectiveEnd(draw);
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin, PetscViewer viewer)
{
  DM                  da, dac, dag;
  PetscInt            N, s, M, w, ncoors = 4;
  const PetscInt     *lx, *ly;
  PetscReal           coors[4];
  PetscDraw           draw, popup;
  PetscBool           isnull, useports = PETSC_FALSE;
  MPI_Comm            comm;
  Vec                 xlocal, xcoor, xcoorl;
  DMBoundaryType      bx, by;
  DMDAStencilType     st;
  ZoomCtx             zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat   format;
  PetscInt           *displayfields;
  PetscInt            ndisplayfields, i, nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscViewerDrawGetBounds(viewer, &nbounds, &bounds));

  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

  PetscCall(PetscObjectGetComm((PetscObject)xin, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &zctx.rank));

  PetscCall(DMDAGetInfo(da, NULL, &M, &N, NULL, &zctx.m, &zctx.n, NULL, &w, &s, &bx, &by, NULL, &st));
  PetscCall(DMDAGetOwnershipRanges(da, &lx, &ly, NULL));

  /*
     Obtain a sequential vector that will contain the local values plus ONE layer of
     ghost values, from which the graphics are drawn. We also need its corresponding
     DMDA (dac), which manages updating that single layer of ghost values.
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsGhosted", (PetscObject *)&xlocal));
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if the original da does not have stencil width one, is periodic, or does not use
         a box stencil, then create a special DMDA to handle one level of ghost points
         for graphics
      */
      PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, w, 1, lx, ly, &dac));
      PetscCall(DMSetUp(dac));
      PetscCall(PetscInfo(da, "Creating auxiliary DMDA for managing graphics ghost points\n"));
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    PetscCall(DMCreateLocalVector(dac, &xlocal));
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      PetscCall(PetscObjectDereference((PetscObject)dac));
    } else {
      /* remove the association between xlocal and da, because below we compose in the
         opposite direction and if we left this connection in place we'd get a reference
         loop, so the objects could never be destroyed */
      PetscCall(PetscObjectRemoveReference((PetscObject)xlocal, "__PETSc_dm"));
    }
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsGhosted", (PetscObject)xlocal));
    PetscCall(PetscObjectDereference((PetscObject)xlocal));
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      PetscCall(VecGetDM(xlocal, &dac));
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  PetscCall(DMGlobalToLocalBegin(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(VecGetArrayRead(xlocal, &zctx.v));

  /*
      Get coordinates of nodes
  */
  PetscCall(DMGetCoordinates(da, &xcoor));
  if (!xcoor) {
    PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0));
    PetscCall(DMGetCoordinates(da, &xcoor));
  }

  /*
      Determine the min and max coordinates in plot
  */
  PetscCall(VecStrideMin(xcoor, 0, NULL, &zctx.xmin));
  PetscCall(VecStrideMax(xcoor, 0, NULL, &zctx.xmax));
  PetscCall(VecStrideMin(xcoor, 1, NULL, &zctx.ymin));
  PetscCall(VecStrideMax(xcoor, 1, NULL, &zctx.ymax));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_axis", &zctx.showaxis, NULL));
  if (zctx.showaxis) {
    coors[0] = zctx.xmin - .05 * (zctx.xmax - zctx.xmin);
    coors[1] = zctx.ymin - .05 * (zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05 * (zctx.xmax - zctx.xmin);
    coors[3] = zctx.ymax + .05 * (zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin;
    coors[1] = zctx.ymin;
    coors[2] = zctx.xmax;
    coors[3] = zctx.ymax;
  }
  PetscCall(PetscOptionsGetRealArray(NULL, NULL, "-draw_coordinates", coors, &ncoors, NULL));
  PetscCall(PetscInfo(da, "Preparing DMDA 2d contour plot coordinates %g %g %g %g\n", (double)coors[0], (double)coors[1], (double)coors[2], (double)coors[3]));

  /*
      Get local ghosted version of coordinates
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject *)&xcoorl));
  if (!xcoorl) {
    /* create a DMDA to manage the local (ghosted) version of the coordinates */
    PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, 2, 1, lx, ly, &dag));
    PetscCall(DMSetUp(dag));
    PetscCall(PetscInfo(dag, "Creating auxiliary DMDA for managing graphics coordinates ghost points\n"));
    PetscCall(DMCreateLocalVector(dag, &xcoorl));
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject)xcoorl));
    PetscCall(PetscObjectDereference((PetscObject)dag));
    PetscCall(PetscObjectDereference((PetscObject)xcoorl));
  } else PetscCall(VecGetDM(xcoorl, &dag));
  PetscCall(DMGlobalToLocalBegin(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(DMGlobalToLocalEnd(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(VecGetArrayRead(xcoorl, &zctx.xy));
  PetscCall(DMDAGetCoordinateName(da, 0, &zctx.name0));
  PetscCall(DMDAGetCoordinateName(da, 1, &zctx.name1));

  /*
      Get information about the size of the region each process must draw
  */
  PetscCall(DMDAGetInfo(dac, NULL, &M, &N, NULL, NULL, NULL, NULL, &zctx.dof, NULL, &bx, &by, NULL, NULL));
  PetscCall(DMDAGetGhostCorners(dac, NULL, NULL, NULL, &zctx.m, &zctx.n, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_grid", &zctx.showgrid, NULL));

  PetscCall(DMDASelectFields(da, &ndisplayfields, &displayfields));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_ports", &useports, NULL));
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawViewPortsCreate(draw, ndisplayfields, &ports));
  }

  /*
      Loop over each field; drawing each in a different window
  */
  for (i = 0; i < ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    PetscCall(VecStrideMin(xin, zctx.k, NULL, &zctx.min));
    PetscCall(VecStrideMax(xin, zctx.k, NULL, &zctx.max));
    if (zctx.k < nbounds) {
      zctx.min = bounds[2 * zctx.k];
      zctx.max = bounds[2 * zctx.k + 1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscCall(PetscInfo(da, "DMDA 2d contour plot min %g max %g\n", (double)zctx.min, (double)zctx.max));

    if (useports) {
      PetscCall(PetscDrawViewPortsSet(ports, i));
    } else {
      const char *title;
      PetscCall(PetscViewerDrawGetDraw(viewer, i, &draw));
      PetscCall(DMDAGetFieldName(da, zctx.k, &title));
      if (title) PetscCall(PetscDrawSetTitle(draw, title));
    }

    PetscCall(PetscDrawGetPopup(draw, &popup));
    PetscCall(PetscDrawScalePopup(popup, zctx.min, zctx.max));
    PetscCall(PetscDrawSetCoordinates(draw, coors[0], coors[1], coors[2], coors[3]));
    PetscCall(PetscDrawZoom(draw, VecView_MPI_Draw_DA2d_Zoom, &zctx));
    if (!useports) PetscCall(PetscDrawSave(draw));
  }
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawSave(draw));
  }

  PetscCall(PetscDrawViewPortsDestroy(ports));
  PetscCall(PetscFree(displayfields));
  PetscCall(VecRestoreArrayRead(xcoorl, &zctx.xy));
  PetscCall(VecRestoreArrayRead(xlocal, &zctx.v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar) * da->M * da->N * da->P * da->w;
  hsize_t     avg_local_vec_size, KiB = 1024, MiB = KiB * KiB, GiB = MiB * KiB, min_size = MiB;
  hsize_t     max_chunks     = 64 * KiB; /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4 * GiB;  /* HDF5 internal limitation */
  PetscInt    zslices = da->p, yslices = da->n, xslices = da->m;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size));
  avg_local_vec_size = (hsize_t)PetscCeilInt(vec_size, comm_size); /* we will attempt to use this as the chunk size */

  target_size = (hsize_t)PetscMin((PetscInt64)vec_size, PetscMin((PetscInt64)max_chunk_size, PetscMax((PetscInt64)avg_local_vec_size, PetscMax(PetscCeilInt64(vec_size, max_chunks), (PetscInt64)min_size))));
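  /* Illustrative worked example (not from the source): with vec_size = 8 GiB on 16 ranks,
     avg_local_vec_size = 512 MiB and PetscCeilInt64(vec_size, max_chunks) = 128 KiB; the
     inner PetscMax() yields min_size = 1 MiB, the outer PetscMax() yields 512 MiB, and
     both PetscMin() clamps leave target_size = 512 MiB, i.e. the average local size. */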
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use a chunk size of 4 GiB no
   matter what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. On a Blue Gene (BG), for example, the optimal solution is probably to
   make the number of chunks equal to the number of IO nodes involved, but this
   author has no access to a BG to figure out how to reliably find the right
   number. And even then it may or may not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt(vec_size, da->p * max_chunk_size) * zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = PetscCeilInt(vec_size, zslices * da->n * max_chunk_size) * yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = PetscCeilInt(vec_size, zslices * yslices * da->m * max_chunk_size) * xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) ++dim;
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t)da->P / zslices;
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    }
    chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) ++dim;
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size / da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->P, da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->P, da->p);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size / da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->N, da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      }
      chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                 dm;
  DM_DA             *da;
  hid_t              filespace;  /* file dataspace identifier */
  hid_t              chunkspace; /* chunk dataset property identifier */
  hid_t              dset_id;    /* dataset identifier */
  hid_t              memspace;   /* memory dataspace identifier */
  hid_t              file_id;
  hid_t              group;
  hid_t              memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t              filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t            dim;
  hsize_t            maxDims[6] = {0}, dims[6] = {0}, chunkDims[6] = {0}, count[6] = {0}, offset[6] = {0}; /* we depend on these being sane later on */
  PetscBool          timestepping = PETSC_FALSE, dim2, spoutput;
  PetscInt           timestep     = PETSC_MIN_INT, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscFunctionBegin;
  PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  PetscCall(PetscViewerHDF5GetBaseDimension2(viewer, &dim2));
  PetscCall(PetscViewerHDF5GetSPOutput(viewer, &spoutput));

  PetscCall(VecGetDM(xin, &dm));
  PetscCheck(dm, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  da = (DM_DA *)dm->data;
  PetscCall(DMGetDimension(dm, &dimension));

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
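  /* Illustrative example (not from the source): for a 2d DMDA with M = 100, N = 80,
     dof = 3 and timestepping active, the code below produces dims = {timestep+1, 80, 100, 3},
     with maxDims identical except for H5S_UNLIMITED in the time slot, and chunkDims
     initially equal to dims except for 1 in the time slot. */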
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep + 1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(da->P, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(da->N, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(da->M, dims + dim));
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscCall(PetscHDF5IntCast(da->w, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  #endif

  PetscCall(VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims));

  PetscCallHDF5Return(filespace, H5Screate_simple, (dim, dims, maxDims));

  #if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
  #endif

  /* Create the dataset with default properties and close filespace */
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscCallHDF5Return(chunkspace, H5Pcreate, (H5P_DATASET_CREATE));
    PetscCallHDF5(H5Pset_chunk, (chunkspace, dim, chunkDims));

    PetscCallHDF5Return(dset_id, H5Dcreate2, (group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));
    PetscCallHDF5(H5Dset_extent, (dset_id, dims));
  }
  PetscCallHDF5(H5Sclose, (filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->zs, offset + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ys, offset + dim++));
  PetscCall(PetscHDF5IntCast(da->xs / da->w, offset + dim++));
  if (da->w > 1 || dim2) offset[dim++] = 0;
  #if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
  #endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->ze - da->zs, count + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ye - da->ys, count + dim++));
  PetscCall(PetscHDF5IntCast((da->xe - da->xs) / da->w, count + dim++));
  if (da->w > 1 || dim2) PetscCall(PetscHDF5IntCast(da->w, count + dim++));
  #if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
  #endif
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArrayRead(xin, &x));
  PetscCallHDF5(H5Dwrite, (dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCallHDF5(H5Fflush, (file_id, H5F_SCOPE_GLOBAL));
  PetscCall(VecRestoreArrayRead(xin, &x));

  #if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "complex", PETSC_BOOL, &tru));
  }
  #endif
  if (timestepping) PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "timestepping", PETSC_BOOL, &timestepping));

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscCall(PetscInfo(xin, "Wrote Vec object with name %s\n", vecname));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
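
/*
   Example (a sketch, not part of this file; error handling abbreviated): writing
   a DMDA vector to HDF5, which reaches VecView_MPI_HDF5_DA() above when PETSc is
   configured with HDF5:

     PetscViewer viewer;
     PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, "x.h5", FILE_MODE_WRITE, &viewer));
     PetscCall(PetscObjectSetName((PetscObject)x, "x"));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/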

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec, PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da, PetscViewer viewer, Vec xin, PetscBool write)
{
  MPI_File           mfdes;
  PetscMPIInt        gsizes[4], lsizes[4], lstarts[4], asiz, dof;
  MPI_Datatype       view;
  const PetscScalar *array;
  MPI_Offset         off;
  MPI_Aint           ub, ul;
  PetscInt           type, rows, vecrows, tr[2];
  DM_DA             *dd = (DM_DA *)da->data;
  PetscBool          skipheader;

  PetscFunctionBegin;
  PetscCall(VecGetSize(xin, &vecrows));
  PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipheader));
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscCall(PetscViewerBinaryRead(viewer, tr, 2, NULL, PETSC_INT));
      type = tr[0];
      rows = tr[1];
      PetscCheck(type == VEC_FILE_CLASSID, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Not vector next in file");
      PetscCheck(rows == vecrows, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) PetscCall(PetscViewerBinaryWrite(viewer, tr, 2, PETSC_INT));
  }

  PetscCall(PetscMPIIntCast(dd->w, &dof));
  gsizes[0] = dof;
  PetscCall(PetscMPIIntCast(dd->M, gsizes + 1));
  PetscCall(PetscMPIIntCast(dd->N, gsizes + 2));
  PetscCall(PetscMPIIntCast(dd->P, gsizes + 3));
  lsizes[0] = dof;
  PetscCall(PetscMPIIntCast((dd->xe - dd->xs) / dof, lsizes + 1));
  PetscCall(PetscMPIIntCast(dd->ye - dd->ys, lsizes + 2));
  PetscCall(PetscMPIIntCast(dd->ze - dd->zs, lsizes + 3));
  lstarts[0] = 0;
  PetscCall(PetscMPIIntCast(dd->xs / dof, lstarts + 1));
  PetscCall(PetscMPIIntCast(dd->ys, lstarts + 2));
  PetscCall(PetscMPIIntCast(dd->zs, lstarts + 3));
  PetscCallMPI(MPI_Type_create_subarray(da->dim + 1, gsizes, lsizes, lstarts, MPI_ORDER_FORTRAN, MPIU_SCALAR, &view));
  PetscCallMPI(MPI_Type_commit(&view));

  PetscCall(PetscViewerBinaryGetMPIIODescriptor(viewer, &mfdes));
  PetscCall(PetscViewerBinaryGetMPIIOOffset(viewer, &off));
  PetscCallMPI(MPI_File_set_view(mfdes, off, MPIU_SCALAR, view, (char *)"native", MPI_INFO_NULL));
  PetscCall(VecGetArrayRead(xin, &array));
  asiz = lsizes[1] * (lsizes[2] > 0 ? lsizes[2] : 1) * (lsizes[3] > 0 ? lsizes[3] : 1) * dof;
  if (write) PetscCall(MPIU_File_write_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  else PetscCall(MPIU_File_read_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  PetscCallMPI(MPI_Type_get_extent(view, &ul, &ub));
  PetscCall(PetscViewerBinaryAddMPIIOOffset(viewer, ub));
  PetscCall(VecRestoreArrayRead(xin, &array));
  PetscCallMPI(MPI_Type_free(&view));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
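
/*
   Example (a sketch, not part of this file): binary I/O reaches DMDAArrayMPIIO()
   above when the binary viewer has MPI-IO enabled, e.g.

     PetscViewer viewer;
     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "x.dat", FILE_MODE_WRITE, &viewer));
     PetscCall(PetscViewerBinarySetUseMPIIO(viewer, PETSC_TRUE));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));

   or via the command-line option -viewer_binary_mpiio.
*/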

PetscErrorCode VecView_MPI_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscInt  dim;
  Vec       natural;
  PetscBool isdraw, isvtk, isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
  const char       *prefix, *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  if (isdraw) {
    PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    if (dim == 1) {
      PetscCall(VecView_MPI_Draw_DA1d(xin, viewer));
    } else if (dim == 2) {
      PetscCall(VecView_MPI_Draw_DA2d(xin, viewer));
    } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Cannot graphically view vector associated with this dimensional DMDA %" PetscInt_FMT, dim);
  } else if (isvtk) { /* Duplicate the Vec */
    Vec Y;
    PetscCall(VecDuplicate(xin, &Y));
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscCall(PetscObjectSetName((PetscObject)Y, ((PetscObject)xin)->name));
    }
    PetscCall(VecCopy(xin, Y));
    {
      PetscObject dmvtk;
      PetscBool   compatible, compatibleSet;
      PetscCall(PetscViewerVTKGetDM(viewer, &dmvtk));
      if (dmvtk) {
        PetscCall(DMGetCompatibility(da, (DM)dmvtk, &compatible, &compatibleSet));
        PetscCheck(compatibleSet && compatible, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscCall(PetscViewerVTKAddField(viewer, (PetscObject)da, DMDAVTKWriteAll, PETSC_DEFAULT, PETSC_VTK_POINT_FIELD, PETSC_FALSE, (PetscObject)Y));
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecView_MPI_HDF5_DA(xin, viewer));
#endif
  } else if (isglvis) {
    PetscCall(VecView_GLVis(xin, viewer));
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary, isMPIIO;

    PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
    if (isbinary) {
      PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
      if (isMPIIO) {
        PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_TRUE));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
    PetscCall(DMDACreateNaturalVector(da, &natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
    PetscCall(DMDAGlobalToNaturalBegin(da, xin, INSERT_VALUES, natural));
    PetscCall(DMDAGlobalToNaturalEnd(da, xin, INSERT_VALUES, natural));
    PetscCall(PetscObjectGetName((PetscObject)xin, &name));
    PetscCall(PetscObjectSetName((PetscObject)natural, name));

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_DEFAULT));
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    PetscCall(VecView(natural, viewer));
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE       *info;
      const char *fieldname;
      char        fieldbuf[256];
      PetscInt    dim, ni, nj, nk, pi, pj, pk, dof, n;

      /* set the viewer format back into the viewer */
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
      PetscCall(PetscViewerBinaryGetInfoPointer(viewer, &info));
      PetscCall(DMDAGetInfo(da, &dim, &ni, &nj, &nk, &pi, &pj, &pk, &dof, NULL, NULL, NULL, NULL, NULL));
      PetscCall(PetscFPrintf(comm, info, "#--- begin code written by PetscViewerBinary for MATLAB format ---#\n"));
      PetscCall(PetscFPrintf(comm, info, "#$$ tmp = PetscBinaryRead(fd); \n"));
      if (dim == 1) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni));
      if (dim == 2) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj));
      if (dim == 3) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj, nk));

      for (n = 0; n < dof; n++) {
        PetscCall(DMDAGetFieldName(da, n, &fieldname));
        if (!fieldname || !fieldname[0]) {
          PetscCall(PetscSNPrintf(fieldbuf, sizeof fieldbuf, "field%" PetscInt_FMT, n));
          fieldname = fieldbuf;
        }
        if (dim == 1) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n", name, fieldname, n + 1));
        if (dim == 2) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n", name, fieldname, n + 1));
        if (dim == 3) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n", name, fieldname, n + 1));
      }
      PetscCall(PetscFPrintf(comm, info, "#$$ clear tmp; \n"));
      PetscCall(PetscFPrintf(comm, info, "#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n"));
    }

    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
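
/*
   Example (a sketch, not part of this file): the PETSC_VIEWER_BINARY_MATLAB branch
   above is reached with

     PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_BINARY_MATLAB));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerPopFormat(viewer));

   and appends MATLAB reshape/squeeze commands to the viewer's .info file so the
   fields can be reconstructed in natural ordering from MATLAB.
*/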

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                da;
  int               dim, rdim;
  hsize_t           dims[6] = {0}, count[6] = {0}, offset[6] = {0};
  PetscBool         dim2 = PETSC_FALSE, timestepping = PETSC_FALSE;
  PetscInt          dimension, timestep = PETSC_MIN_INT, dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t             filespace; /* file dataspace identifier */
  hid_t             dset_id;   /* dataset identifier */
  hid_t             memspace;  /* memory dataspace identifier */
  hid_t             file_id, group;
  hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;

  PetscFunctionBegin;
  #if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  scalartype = H5T_NATIVE_DOUBLE;
  #endif

  PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  PetscCall(PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  PetscCall(VecGetDM(xin, &da));
  dd = (DM_DA *)da->data;
  PetscCall(DMGetDimension(da, &dimension));

  /* Open dataset */
  PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));

  /* Retrieve the dataspace for the dataset */
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (filespace, dims, NULL));

  /* Expected dimension for holding the dof's */
  #if defined(PETSC_USE_COMPLEX)
  dofInd = rdim - 2;
  #else
  dofInd = rdim - 1;
  #endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
  #if defined(PETSC_USE_COMPLEX)
  ++dim;
  #endif

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim + 1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where dof does not match the input file */
    else PetscCheck(dd->w == (PetscInt)dims[dofInd], PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected", (PetscInt)dims[dofInd], dd->w);

    /* Other cases where rdim != dim cannot be handled currently */
  } else PetscCheck(rdim == dim, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT, rdim, dim, dd->w);

  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim]  = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(dd->zs, offset + dim));
    PetscCall(PetscHDF5IntCast(dd->ze - dd->zs, count + dim));
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(dd->ys, offset + dim));
    PetscCall(PetscHDF5IntCast(dd->ye - dd->ys, count + dim));
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(dd->xs / dd->w, offset + dim));
  PetscCall(PetscHDF5IntCast((dd->xe - dd->xs) / dd->w, count + dim));
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscCall(PetscHDF5IntCast(dd->w, count + dim));
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim]  = 2;
  ++dim;
  #endif

  /* Create the memory and filespace */
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArray(xin, &x));
  PetscCallHDF5(H5Dread, (dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCall(VecRestoreArray(xin, &x));

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
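
/*
   Example (a sketch, not part of this file): loading a DMDA vector from HDF5,
   which dispatches to VecLoad_HDF5_DA() above:

     PetscViewer viewer;
     PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, "x.h5", FILE_MODE_READ, &viewer));
     PetscCall(PetscObjectSetName((PetscObject)x, "x"));
     PetscCall(VecLoad(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/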

PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM          da;
  Vec         natural;
  const char *prefix;
  PetscInt    bs;
  PetscBool   flag;
  DM_DA      *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
  if (isMPIIO) {
    PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_FALSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
#endif

  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
  PetscCall(DMDACreateNaturalVector(da, &natural));
  PetscCall(PetscObjectSetName((PetscObject)natural, ((PetscObject)xin)->name));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
  PetscCall(VecLoad(natural, viewer));
  PetscCall(DMDANaturalToGlobalBegin(da, natural, INSERT_VALUES, xin));
  PetscCall(DMDANaturalToGlobalEnd(da, natural, INSERT_VALUES, xin));
  PetscCall(VecDestroy(&natural));
  PetscCall(PetscInfo(xin, "Loading vector from natural ordering into DMDA\n"));
  PetscCall(PetscOptionsGetInt(NULL, ((PetscObject)xin)->prefix, "-vecload_block_size", &bs, &flag));
  if (flag && bs != dd->w) PetscCall(PetscInfo(xin, "Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n", bs, dd->w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
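
/*
   Example (a sketch, not part of this file): binary load into a DMDA vector;
   VecLoad() on a vector obtained from a DMDA dispatches to VecLoad_Default_DA()
   below and from there to VecLoad_Binary_DA() above:

     PetscViewer viewer;
     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "x.dat", FILE_MODE_READ, &viewer));
     PetscCall(VecLoad(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/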

PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));

  if (isbinary) {
    PetscCall(VecLoad_Binary_DA(xin, viewer));
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecLoad_HDF5_DA(xin, viewer));
#endif
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}