Actual source code: psplit.c


#include <petscsys.h>

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
        (or global) length via a simple formula. Splits so that each process's local size
        is divisible by the block size.

   Collective (if `N` is `PETSC_DECIDE`)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or `PETSC_DECIDE` to have it set)
-    N - global length (or `PETSC_DECIDE`)

  Level: developer

   Notes:
     `n` and `N` cannot both be `PETSC_DECIDE`

     If one processor calls this with `N` of `PETSC_DECIDE` then all processors
     must; otherwise the program will hang.

.seealso: `PetscSplitOwnership()`, `PetscSplitOwnershipEqual()`
@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm, PetscInt bs, PetscInt *n, PetscInt *N)
{
  PetscMPIInt size, rank;

  PetscFunctionBegin;
  PetscCheck(*N != PETSC_DECIDE || *n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    PetscCheck(*n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "local size %" PetscInt_FMT " not divisible by block size %" PetscInt_FMT, *n, bs);
    PetscCall(MPIU_Allreduce(n, N, 1, MPIU_INT, MPI_SUM, comm));
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N / bs;
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    /* distribute the Nbs blocks evenly: each rank gets Nbs / size blocks, and the first Nbs % size ranks get one extra block */
    *n = bs * (Nbs / size + ((Nbs % size) > rank));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
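
/*
   Illustrative usage sketch (not part of psplit.c): how PetscSplitOwnershipBlock() might be
   called to pick a block-aligned local size when only the global size is known. The helper
   name ExampleSplitOwnershipBlock and the values bs = 2, N = 100 are assumptions chosen for
   illustration only.
*/
static PetscErrorCode ExampleSplitOwnershipBlock(MPI_Comm comm)
{
  PetscInt bs = 2, n = PETSC_DECIDE, N = 100; /* 50 blocks of size 2 to distribute */

  PetscFunctionBegin;
  /* every rank passes the same N; since N is supplied, no reduction is needed and each rank computes its own block-aligned share */
  PetscCall(PetscSplitOwnershipBlock(comm, bs, &n, &N));
  /* n is now a multiple of bs on every rank, and the local sizes sum to N across comm */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "local size %" PetscInt_FMT "\n", n));
  PetscFunctionReturn(PETSC_SUCCESS);
}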

/*@
    PetscSplitOwnership - Given a global (or local) length, determines a local
        (or global) length via a simple formula

   Collective (if `n` or `N` is `PETSC_DECIDE`)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or `PETSC_DECIDE` to have it set)
-    N - global length (or `PETSC_DECIDE`)

  Level: developer

   Notes:
     `n` and `N` cannot both be `PETSC_DECIDE`

     If one processor calls this with `n` or `N` of `PETSC_DECIDE` then all processors
     must. Otherwise, an error is thrown in debug mode, while the program will hang
     in optimized mode (i.e. configured with --with-debugging=0).

.seealso: `PetscSplitOwnershipBlock()`, `PetscSplitOwnershipEqual()`
@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm, PetscInt *n, PetscInt *N)
{
  PetscMPIInt size, rank;

  PetscFunctionBegin;
  PetscCheck(*N != PETSC_DECIDE || *n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Both n and N cannot be PETSC_DECIDE\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://petsc.org/release/faq/#split-ownership");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2], g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCall(MPIU_Allreduce(l, g, 2, MPI_INT, MPI_SUM, comm));
    PetscCheck(!g[0] || g[0] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for local size");
    PetscCheck(!g[1] || g[1] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    PetscCall(MPIU_Allreduce(&m, &M, 1, MPIU_INT64, MPI_SUM, comm));
    PetscCheck(M <= PETSC_MAX_INT, comm, PETSC_ERR_INT_OVERFLOW, "Global size overflow %" PetscInt64_FMT ". Consider configuring PETSc with --with-64-bit-indices", M);
    *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    /* each rank gets N / size entries; the first N % size ranks get one extra */
    *n = *N / size + ((*N % size) > rank);
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    PetscCall(MPIU_Allreduce(n, &tmp, 1, MPIU_INT, MPI_SUM, comm));
    PetscCheck(tmp == *N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local lengths %" PetscInt_FMT " does not equal global length %" PetscInt_FMT ", my local length %" PetscInt_FMT "\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://petsc.org/release/faq/#split-ownership", tmp, *N, *n);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
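
/*
   Illustrative usage sketch (not part of psplit.c): the typical PetscSplitOwnership() call,
   where each rank knows only the global length and asks for its local share. The helper name
   ExampleSplitOwnership and the value N = 10 are assumptions for illustration; with 3 ranks
   the formula above yields local lengths 4, 3, 3.
*/
static PetscErrorCode ExampleSplitOwnership(MPI_Comm comm)
{
  PetscInt n = PETSC_DECIDE, N = 10;

  PetscFunctionBegin;
  /* every rank passes the same N; the routine fills in this rank's share n */
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* e.g. with 3 ranks: rank 0 gets 10/3 + 1 = 4, ranks 1 and 2 get 10/3 = 3 */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "local length %" PetscInt_FMT " of global %" PetscInt_FMT "\n", n, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}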

/*@
    PetscSplitOwnershipEqual - Given a global (or local) length, determines a local
        (or global) length via a simple formula, trying to make all local lengths equal

   Collective (if `n` or `N` is `PETSC_DECIDE`)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or `PETSC_DECIDE` to have it set)
-    N - global length (or `PETSC_DECIDE`)

   Level: developer

   Notes:
     This is intended to be used with `MATSCALAPACK`, where the local size must
     be equal on all processes (except possibly the last one). For instance,
     the local sizes when splitting `N`=50 over 6 processes are 9,9,9,9,9,5.

     `n` and `N` cannot both be `PETSC_DECIDE`

     If one processor calls this with `n` or `N` of `PETSC_DECIDE` then all processors
     must. Otherwise, an error is thrown in debug mode, while the program will hang
     in optimized mode (i.e. configured with --with-debugging=0).

.seealso: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`
@*/
PetscErrorCode PetscSplitOwnershipEqual(MPI_Comm comm, PetscInt *n, PetscInt *N)
{
  PetscMPIInt size, rank;

  PetscFunctionBegin;
  PetscCheck(*N != PETSC_DECIDE || *n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2], g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCall(MPIU_Allreduce(l, g, 2, MPI_INT, MPI_SUM, comm));
    PetscCheck(!g[0] || g[0] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for local size");
    PetscCheck(!g[1] || g[1] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    PetscCall(MPIU_Allreduce(&m, &M, 1, MPIU_INT64, MPI_SUM, comm));
    PetscCheck(M <= PETSC_MAX_INT, comm, PETSC_ERR_INT_OVERFLOW, "Global size overflow %" PetscInt64_FMT ". Consider configuring PETSc with --with-64-bit-indices", M);
    *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCallMPI(MPI_Comm_rank(comm, &rank));
    /* when N is not evenly divisible, give leading ranks N / size + 1 entries while that many remain;
       the first rank that cannot be fully filled takes the leftover, and any later ranks get 0 */
    *n = *N / size;
    if (*N % size) {
      if ((rank + 1) * (*n + 1) <= *N) *n = *n + 1;
      else if (rank * (*n + 1) <= *N) *n = *N - rank * (*n + 1);
      else *n = 0;
    }
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    PetscCall(MPIU_Allreduce(n, &tmp, 1, MPIU_INT, MPI_SUM, comm));
    PetscCheck(tmp == *N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local lengths %" PetscInt_FMT " does not equal global length %" PetscInt_FMT ", my local length %" PetscInt_FMT, tmp, *N, *n);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
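
/*
   Illustrative usage sketch (not part of psplit.c): reproduces the N = 50, 6-process example
   from the PetscSplitOwnershipEqual() notes. The helper name ExampleSplitOwnershipEqual is an
   assumption for illustration; run on 6 ranks, the printed local lengths are 9, 9, 9, 9, 9, 5.
*/
static PetscErrorCode ExampleSplitOwnershipEqual(MPI_Comm comm)
{
  PetscInt    n = PETSC_DECIDE, N = 50;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* ranks 0-4 receive 50/6 + 1 = 9 entries; rank 5 receives the remaining 50 - 5*9 = 5 */
  PetscCall(PetscSplitOwnershipEqual(comm, &n, &N));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] local length %" PetscInt_FMT "\n", rank, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}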