Actual source code: ex25.c


static char help[] = "Takes a patch of a large DMDA vector to one process.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmpatch.h>
#include <petscsf.h>

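/* Each grid point carries two interlaced components (matching dof = 2 below), so a
   vector on the DMDA can be addressed as a two-dimensional array of Field */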
typedef struct {
  PetscScalar x, y;
} Field;

int main(int argc, char **argv)
{
  Vec         xy, sxy;
  DM          da, sda = NULL;
  PetscSF     sf;
  PetscInt    m = 10, n = 10, dof = 2;
  MatStencil  lower = {0, 3, 2, 0}, upper = {0, 7, 8, 0}; /* These are given in the order of the z, y, x logical coordinates; the fourth entry (the field component) is ignored */
  MPI_Comm    comm;
  PetscMPIInt rank;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* create the large DMDA and set coordinates (which we will copy down to the small DA). */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, m, n, PETSC_DECIDE, PETSC_DECIDE, dof, 1, 0, 0, &da));
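  /* DMSetFromOptions() lets run-time options modify the DMDA before it is set up,
     e.g. -da_grid_x and -da_grid_y to change the grid size */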
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  /* Just as a simple example we use the coordinates as the variables in the vectors we wish to examine. */
  PetscCall(DMGetCoordinates(da, &xy));
  /* The vector entries are displayed in the "natural" ordering on the two-dimensional grid; interlaced x and y, with the x variable increasing more rapidly than the y */
  PetscCall(VecView(xy, 0));

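  /* Choose which ranks will hold the patch: rank 0 passes a non-null communicator to
     DMPatchZoom() below and so is the only rank that receives the patch entries */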
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (rank == 0) comm = MPI_COMM_SELF;
  else comm = MPI_COMM_NULL;

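  /* Create the small patch DM sda (here used only on rank 0, where it lives on
     MPI_COMM_SELF) and the PetscSF sf that maps entries of the global vector of da
     onto the patch */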
  PetscCall(DMPatchZoom(da, lower, upper, comm, &sda, NULL, &sf));
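  /* The scatter below is collective, so the ranks that do not hold the patch
     participate with an empty sequential vector as their destination */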
  if (rank == 0) {
    PetscCall(DMCreateGlobalVector(sda, &sxy));
  } else {
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &sxy));
  }
  /* A PetscSF can also be used as a VecScatter context */
  PetscCall(VecScatterBegin(sf, xy, sxy, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(sf, xy, sxy, INSERT_VALUES, SCATTER_FORWARD));
  /* Only rank == 0 has the entries of the patch, so this code runs only on that rank */
  if (rank == 0) {
    Field       **vars;
    DMDALocalInfo info;
    PetscInt      i, j;
    PetscScalar   sum = 0;

    /* The vector entries of the patch are displayed in the "natural" ordering on the two-dimensional grid; interlaced x and y, with the x variable increasing more rapidly than the y */
    PetscCall(VecView(sxy, PETSC_VIEWER_STDOUT_SELF));
    /* Compute a trivial statistic of the coordinates */
    PetscCall(DMDAGetLocalInfo(sda, &info));
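    /* info.xs/info.ys are the global starting indices of this rank's portion of the
       grid and info.xm/info.ym its extents; since sda lives on one rank, the loop
       below covers the entire patch */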
    PetscCall(DMDAVecGetArray(sda, sxy, &vars));
    /* Loop over the patch taken from the entire domain */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) sum += vars[j][i].x;
    }
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "The sum of the x coordinates is %g\n", (double)PetscRealPart(sum)));
    PetscCall(DMDAVecRestoreArray(sda, sxy, &vars));
  }

  PetscCall(VecDestroy(&sxy));
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(DMDestroy(&sda));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
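
/* Example run (the executable name ex25 is assumed from the source file name):
     mpiexec -n 4 ./ex25
   The TEST block below runs this example on 1 and on 4 MPI ranks. */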

/*TEST

   test:

   test:
     nsize: 4
     suffix: 2

TEST*/