Actual source code: dagtona.c


/*
     Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps the usual global vector to a
  copy of the entire vector, in NATURAL numbering, on every processor, and (2)
  one that, from the entire vector in natural numbering on each processor,
  extracts this processor's piece in GLOBAL numbering.
*/

#include <petsc/private/dmdaimpl.h>

/*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from a
     distributed global vector to a copy of the entire vector on each processor,
     in natural numbering

   Collective

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalAllCreate(DM da, VecScatter *scatter)
{
  PetscInt N;
  IS       from, to;
  Vec      tmplocal, global;
  AO       ao;
  DM_DA   *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(DMDAGetAO(da, &ao));

  /* create the scatter context */
  /* template vectors (no arrays attached) describe the parallel source and sequential destination layouts */
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
  PetscCall(VecGetSize(global, &N));
  /* send all N entries; entry i of the global (PETSc-ordered) vector lands at its natural (application) index */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &to));
  PetscCall(AOPetscToApplicationIS(ao, to));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &from));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, N, NULL, &tmplocal));
  PetscCall(VecScatterCreate(global, from, tmplocal, to, scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}
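
/*
  Usage sketch (not part of dagtona.c): assuming `da` is a DMDA and `g` is a
  global vector obtained from it, the following illustrates how the scatter
  created above might be used to gather the entire vector, in natural numbering,
  onto every processor.  The helper name `GatherAllNatural` and the sequential
  vector `natall` are illustrative only.

  static PetscErrorCode GatherAllNatural(DM da, Vec g, Vec *natall)
  {
    VecScatter tonatural;
    PetscInt   N;

    PetscFunctionBegin;
    PetscCall(VecGetSize(g, &N));
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, N, natall));   full-length copy held on this processor
    PetscCall(DMDAGlobalToNaturalAllCreate(da, &tonatural));
    PetscCall(VecScatterBegin(tonatural, g, *natall, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(tonatural, g, *natall, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&tonatural));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
*/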

/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalAllToGlobalCreate(DM da, VecScatter *scatter)
{
  DM_DA   *dd = (DM_DA *)da->data;
  PetscInt M, m = dd->Nlocal, start;
  IS       from, to;
  Vec      tmplocal, global;
  AO       ao;

  PetscFunctionBegin;
  PetscCall(DMDAGetAO(da, &ao));

  /* create the scatter context */
  /* M is the total number of entries; every processor holds the full vector of length M in natural numbering */
  PetscCall(MPIU_Allreduce(&m, &M, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)da)));
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, m, PETSC_DETERMINE, NULL, &global));
  PetscCall(VecGetOwnershipRange(global, &start, NULL));
  /* pull this processor's owned entries from their natural (application) locations in the full copy */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
  PetscCall(AOPetscToApplicationIS(ao, from));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &to));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, M, NULL, &tmplocal));
  PetscCall(VecScatterCreate(tmplocal, from, global, to, scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}
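
/*
  Usage sketch (not part of dagtona.c): assuming each processor holds the entire
  vector in natural numbering in a sequential vector `natall` (for example, the
  result of a redundant coarse grid solve), the following illustrates how the
  scatter created above might be used to move this processor's piece into a
  global vector `g` of the DMDA.  The helper name `ScatterNaturalAllToGlobal`
  is illustrative only.

  static PetscErrorCode ScatterNaturalAllToGlobal(DM da, Vec natall, Vec g)
  {
    VecScatter toglobal;

    PetscFunctionBegin;
    PetscCall(DMDANaturalAllToGlobalCreate(da, &toglobal));
    PetscCall(VecScatterBegin(toglobal, natall, g, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(toglobal, natall, g, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&toglobal));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
*/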