Actual source code: mpiu.c


#include <petscsys.h>
#include <petsc/private/petscimpl.h>
/*
    Note that a tag of 0 is OK because comm is a private communicator
  generated below just for these routines.
*/

PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
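  /* Wait for a zero-length message from the previous process before proceeding (all processes except rank 0) */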
  if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
  /* Send to the next process in the group unless we are the last process */
  if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Send to the first process in the next group */
  if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
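  /* Rank 0 receives the message sent by the last process, so it returns only after every process has completed its phase */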
  if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval identifies the MPI attribute, attached to the
  user's communicator, that stores the private communicator used by the
  sequential phase routines below.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;

/*@
   PetscSequentialPhaseBegin - Begins a sequential section of code.

   Collective

   Input Parameters:
+  comm - Communicator to sequentialize over
-  ng   - Number of processes per group; this many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Notes:
   `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
   way to force a section of code to be executed by the processes in
   rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve
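
   For example, each process might append its contribution to a common text file in
   rank order (a sketch only; the file name and the data written are hypothetical):
.vb
      FILE *fp;

      PetscSequentialPhaseBegin(comm, 1);
      fp = fopen("output.txt", "a");
      if (fp) {
        fprintf(fp, "data from one process\n");
        fclose(fp);
      }
      PetscSequentialPhaseEnd(comm, 1);
.ve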

   To ensure that output from different MPI processes is properly ordered, use `PetscSynchronizedPrintf()` rather than these routines.

.seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
@*/
PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng)
{
  PetscMPIInt size;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCall(PetscSysInitializePackage());
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Get the private communicator for the sequential operations */
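  /* The keyval is created only on the first call and then reused; the duplicated communicator
     created below is stored as an attribute on comm so that PetscSequentialPhaseEnd() can
     retrieve and free it */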
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));

  PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
  PetscCall(PetscMalloc1(1, &addr_local_comm));

  *addr_local_comm = local_comm;

  PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
  PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   PetscSequentialPhaseEnd - Ends a sequential section of code.

   Collective

   Input Parameters:
+  comm - Communicator to sequentialize over
-  ng   - Number of processes per group; this many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Note:
   See `PetscSequentialPhaseBegin()` for more details.

.seealso: `PetscSequentialPhaseBegin()`
@*/
PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng)
{
  PetscMPIInt size, flag;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));

  PetscCall(PetscFree(addr_local_comm));
  PetscCallMPI(MPI_Comm_free(&local_comm));
  PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PetscGlobalMinMaxInt - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce over
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner
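
  Example Usage:
  A minimal sketch, where locMin and locMax are hypothetical locally computed values:
.vb
      PetscInt lminmax[2], gminmax[2];

      lminmax[0] = locMin;
      lminmax[1] = locMax;
      PetscCall(PetscGlobalMinMaxInt(comm, lminmax, gminmax));
      // gminmax[0] now holds the minimum over all processes, gminmax[1] the maximum
.ve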

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
@*/
PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
{
  PetscInt sendbuf[3], recvbuf[3];

  PetscFunctionBegin;
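  /* The global minimum is obtained by negating, reducing with MPI_MAX, and negating back.
     Because -PETSC_MIN_INT overflows back to PETSC_MIN_INT, sendbuf[2] flags whether any process
     has PETSC_MIN_INT as its local minimum so the result can be patched after the reduction */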
  sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
  sendbuf[1] = minMaxVal[1];
  sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Flag whether the local minimum is PETSC_MIN_INT, for which the negation above overflows */
  PetscCall(MPIU_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
  minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
  minMaxValGlobal[1] = recvbuf[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce over
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner
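
  Example Usage:
  A minimal sketch that reduces the bounds of a hypothetical local array vals of length n:
.vb
      PetscReal lminmax[2] = {PETSC_MAX_REAL, PETSC_MIN_REAL}, gminmax[2];

      for (PetscInt i = 0; i < n; ++i) {
        lminmax[0] = PetscMin(lminmax[0], vals[i]);
        lminmax[1] = PetscMax(lminmax[1], vals[i]);
      }
      PetscCall(PetscGlobalMinMaxReal(comm, lminmax, gminmax));
.ve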

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
{
  PetscReal sendbuf[2];

  PetscFunctionBegin;
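  /* As in PetscGlobalMinMaxInt(), the minimum is obtained by negating, reducing with max, and
     negating back; negating a real cannot overflow, so no extra flag is needed */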
  sendbuf[0] = -minMaxVal[0];
  sendbuf[1] = minMaxVal[1];
  PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
  minMaxValGlobal[0] = -minMaxValGlobal[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}